feat: Add voice input service with speech-to-text

Voice Service (lib/services/voice_service.dart):
- speech_to_text package integration
- Start/stop listening with streaming partial results
- Language selection (en_US default, configurable)
- Error handling (no mic, permission denied, timeout)
- Sound level monitoring for visualizations
- Continuous dictation mode support
- Stream-based reactive API for status/transcript/levels

Voice Input Widget (lib/features/input/voice_input_widget.dart):
- Animated mic button with pulse effect while listening
- Real-time transcript display with copy button
- Waveform visualization responding to sound levels
- Tap to start/stop interactions
- Haptic feedback on interactions
- Compact mode for inline use
- Error display with auto-dismiss

Platform permissions:
- Android: RECORD_AUDIO permission
- iOS: NSMicrophoneUsageDescription, NSSpeechRecognitionUsageDescription

Input screen integration with voice toggle and continuous mode
This commit is contained in:
Johan Jongsma 2026-01-31 19:44:11 +00:00
parent 9cc40d0765
commit e8103f1ee9
15 changed files with 3011 additions and 230 deletions

View File

@ -1,4 +1,8 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- Permissions for voice input -->
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.INTERNET"/>
<application
android:label="inou_mobile"
android:name="${applicationName}"

View File

@ -45,5 +45,9 @@
<true/>
<key>UIApplicationSupportsIndirectInputEvents</key>
<true/>
<key>NSSpeechRecognitionUsageDescription</key>
<string>This app uses speech recognition to convert your voice to text.</string>
<key>NSMicrophoneUsageDescription</key>
<string>This app needs microphone access for voice input and speech-to-text features.</string>
</dict>
</plist>

View File

@ -1,41 +0,0 @@
import 'package:local_auth/local_auth.dart';
/// Authentication service for biometrics.
///
/// Thin wrapper around [LocalAuthentication] that converts plugin
/// exceptions into safe fallback values so callers never need try/catch.
class AuthService {
  final LocalAuthentication _localAuth = LocalAuthentication();

  /// Whether this device both supports and can currently perform
  /// biometric checks. Returns `false` on any plugin error.
  Future<bool> isBiometricsAvailable() async {
    try {
      final checks = await Future.wait([
        _localAuth.canCheckBiometrics,
        _localAuth.isDeviceSupported(),
      ]);
      return checks.every((supported) => supported);
    } catch (_) {
      // Treat plugin failures as "biometrics unavailable".
      return false;
    }
  }

  /// Shows the platform authentication prompt.
  ///
  /// Keeps the prompt alive across app switches (`stickyAuth`) and allows
  /// device-credential fallback (`biometricOnly: false`). Returns `false`
  /// when the user fails, cancels, or the plugin throws.
  Future<bool> authenticate({String reason = 'Please authenticate to continue'}) async {
    const options = AuthenticationOptions(
      stickyAuth: true,
      biometricOnly: false,
    );
    try {
      final didAuthenticate = await _localAuth.authenticate(
        localizedReason: reason,
        options: options,
      );
      return didAuthenticate;
    } catch (_) {
      return false;
    }
  }

  /// The biometric types enrolled on this device, or an empty list when
  /// the query fails.
  Future<List<BiometricType>> getAvailableBiometrics() async {
    try {
      return await _localAuth.getAvailableBiometrics();
    } catch (_) {
      return <BiometricType>[];
    }
  }
}

390
lib/core/auth_gate.dart Normal file
View File

@ -0,0 +1,390 @@
import 'package:flutter/material.dart';
import '../services/biometric_service.dart';
import 'theme.dart';
/// Widget that wraps the app and handles biometric authentication.
///
/// Shows a biometric prompt on app launch and on resume from background;
/// [child] is only rendered once authentication succeeds (see the state
/// class for the lock/unlock logic).
class AuthGate extends StatefulWidget {
  /// The app content revealed after successful authentication.
  final Widget child;
  const AuthGate({
    super.key,
    required this.child,
  });
  @override
  State<AuthGate> createState() => _AuthGateState();
}
class _AuthGateState extends State<AuthGate> with WidgetsBindingObserver {
  final BiometricService _biometricService = BiometricService();

  // Whether the lock screen is shown instead of the wrapped app.
  bool _isLocked = true;
  // Guards against overlapping biometric prompts.
  bool _isAuthenticating = false;
  // True once the initial "is auth required?" check has completed.
  bool _isInitialized = false;
  // Last non-success result; drives the error banner on the lock screen.
  BiometricResult? _lastError;
  // Consecutive failed attempts; the button disables at _maxFailures.
  int _failureCount = 0;
  static const int _maxFailures = 3;

  @override
  void initState() {
    super.initState();
    WidgetsBinding.instance.addObserver(this);
    _initialize();
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    super.dispose();
  }

  /// Performs the initial lock check and, when required, shows the
  /// biometric prompt shortly after the first frame.
  Future<void> _initialize() async {
    final required = await _biometricService.isAuthenticationRequired();
    // FIX: the widget can be disposed while awaiting the check above;
    // calling setState afterwards would throw.
    if (!mounted) return;
    setState(() {
      _isLocked = required;
      _isInitialized = true;
    });
    if (required) {
      // Small delay to ensure UI is ready
      await Future.delayed(const Duration(milliseconds: 300));
      // FIX: guard again after the delay before prompting.
      if (!mounted) return;
      _authenticate();
    }
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    super.didChangeAppLifecycleState(state);
    switch (state) {
      case AppLifecycleState.resumed:
        _onAppResumed();
        break;
      case AppLifecycleState.paused:
      case AppLifecycleState.inactive:
      case AppLifecycleState.hidden:
        _onAppBackgrounded();
        break;
      case AppLifecycleState.detached:
        break;
    }
  }

  /// Re-locks and re-prompts when authentication is required again after
  /// the app returns to the foreground.
  Future<void> _onAppResumed() async {
    if (_isAuthenticating) return;
    final required = await _biometricService.isAuthenticationRequired();
    // FIX: guard against disposal during the await above.
    if (!mounted) return;
    if (required && !_isLocked) {
      setState(() {
        _isLocked = true;
        _lastError = null;
        _failureCount = 0;
      });
      _authenticate();
    } else if (_isLocked && !_isAuthenticating) {
      // Still locked, try again
      _authenticate();
    }
  }

  void _onAppBackgrounded() {
    // Record last activity time when going to background
    _biometricService.recordActivity();
  }

  /// Runs the platform biometric prompt and maps the result onto UI state.
  Future<void> _authenticate() async {
    if (_isAuthenticating) return;
    setState(() {
      _isAuthenticating = true;
      _lastError = null;
    });
    final result = await _biometricService.authenticate(
      reason: 'Authenticate to access inou',
      biometricOnly: false, // Allow PIN fallback
    );
    if (!mounted) return;
    setState(() {
      _isAuthenticating = false;
    });
    switch (result) {
      case BiometricResult.success:
        setState(() {
          _isLocked = false;
          _failureCount = 0;
          _lastError = null;
        });
        break;
      case BiometricResult.cancelled:
        // User cancelled, don't count as failure
        setState(() {
          _lastError = result;
        });
        break;
      case BiometricResult.failed:
        setState(() {
          _failureCount++;
          _lastError = result;
        });
        break;
      case BiometricResult.lockedOut:
      case BiometricResult.permanentlyLockedOut:
      case BiometricResult.notAvailable:
      case BiometricResult.notEnrolled:
      case BiometricResult.error:
        setState(() {
          _lastError = result;
        });
        break;
    }
  }

  // Forwards user interaction to the service's inactivity timer.
  void _recordUserActivity() {
    _biometricService.recordActivity();
  }

  @override
  Widget build(BuildContext context) {
    // Not yet initialized - show nothing (brief flash)
    if (!_isInitialized) {
      return const SizedBox.shrink();
    }
    // Not locked - show the app; taps/pans refresh the activity timer.
    if (!_isLocked) {
      return GestureDetector(
        behavior: HitTestBehavior.translucent,
        onTap: _recordUserActivity,
        onPanDown: (_) => _recordUserActivity(),
        child: widget.child,
      );
    }
    // Locked - show auth screen
    return _buildLockScreen();
  }

  /// Full-screen lock UI: logo, optional error banner, authenticate
  /// button (or progress indicator), and failure-count messaging.
  Widget _buildLockScreen() {
    return Scaffold(
      backgroundColor: AppTheme.backgroundColor,
      body: SafeArea(
        child: Center(
          child: Padding(
            padding: const EdgeInsets.all(32.0),
            child: Column(
              mainAxisAlignment: MainAxisAlignment.center,
              children: [
                // App icon/logo
                Container(
                  width: 100,
                  height: 100,
                  decoration: BoxDecoration(
                    color: AppTheme.surfaceColor,
                    borderRadius: BorderRadius.circular(24),
                    border: Border.all(
                      color: AppTheme.primaryColor.withOpacity(0.3),
                      width: 2,
                    ),
                  ),
                  child: Icon(
                    Icons.lock_outline,
                    size: 48,
                    color: AppTheme.primaryColor,
                  ),
                ),
                const SizedBox(height: 32),
                Text(
                  'inou',
                  style: TextStyle(
                    fontSize: 32,
                    fontWeight: FontWeight.bold,
                    color: AppTheme.textColor,
                  ),
                ),
                const SizedBox(height: 8),
                Text(
                  'Authentication Required',
                  style: TextStyle(
                    fontSize: 16,
                    color: AppTheme.textColor.withOpacity(0.7),
                  ),
                ),
                const SizedBox(height: 48),
                // Error message
                if (_lastError != null) ...[
                  Container(
                    padding: const EdgeInsets.all(16),
                    decoration: BoxDecoration(
                      color: _getErrorColor(_lastError!).withOpacity(0.1),
                      borderRadius: BorderRadius.circular(12),
                      border: Border.all(
                        color: _getErrorColor(_lastError!).withOpacity(0.3),
                      ),
                    ),
                    child: Row(
                      children: [
                        Icon(
                          _getErrorIcon(_lastError!),
                          color: _getErrorColor(_lastError!),
                          size: 24,
                        ),
                        const SizedBox(width: 12),
                        Expanded(
                          child: Text(
                            _biometricService.getErrorMessage(_lastError!),
                            style: TextStyle(
                              color: _getErrorColor(_lastError!),
                              fontSize: 14,
                            ),
                          ),
                        ),
                      ],
                    ),
                  ),
                  const SizedBox(height: 24),
                ],
                // Authenticate button
                if (!_isAuthenticating) ...[
                  SizedBox(
                    width: double.infinity,
                    height: 56,
                    child: ElevatedButton.icon(
                      onPressed: _failureCount >= _maxFailures
                          ? null
                          : _authenticate,
                      icon: const Icon(Icons.fingerprint, size: 28),
                      label: Text(
                        _failureCount >= _maxFailures
                            ? 'Too many attempts'
                            : 'Authenticate',
                        style: const TextStyle(fontSize: 18),
                      ),
                      style: ElevatedButton.styleFrom(
                        backgroundColor: AppTheme.primaryColor,
                        foregroundColor: Colors.white,
                        disabledBackgroundColor: Colors.grey.shade800,
                        disabledForegroundColor: Colors.grey.shade500,
                        shape: RoundedRectangleBorder(
                          borderRadius: BorderRadius.circular(16),
                        ),
                      ),
                    ),
                  ),
                ] else ...[
                  // Authenticating indicator
                  Column(
                    children: [
                      SizedBox(
                        width: 48,
                        height: 48,
                        child: CircularProgressIndicator(
                          valueColor: AlwaysStoppedAnimation<Color>(
                            AppTheme.primaryColor,
                          ),
                          strokeWidth: 3,
                        ),
                      ),
                      const SizedBox(height: 16),
                      Text(
                        'Authenticating...',
                        style: TextStyle(
                          color: AppTheme.textColor.withOpacity(0.7),
                          fontSize: 16,
                        ),
                      ),
                    ],
                  ),
                ],
                // Failure count indicator
                if (_failureCount > 0 && _failureCount < _maxFailures) ...[
                  const SizedBox(height: 16),
                  Text(
                    '${_maxFailures - _failureCount} attempts remaining',
                    style: TextStyle(
                      color: Colors.orange.shade400,
                      fontSize: 14,
                    ),
                  ),
                ],
                // Reset after max failures
                if (_failureCount >= _maxFailures) ...[
                  const SizedBox(height: 24),
                  TextButton(
                    onPressed: () {
                      setState(() {
                        _failureCount = 0;
                        _lastError = null;
                      });
                    },
                    child: Text(
                      'Try Again',
                      style: TextStyle(
                        color: AppTheme.primaryColor,
                        fontSize: 16,
                      ),
                    ),
                  ),
                ],
              ],
            ),
          ),
        ),
      ),
    );
  }

  /// Severity color for the error banner (grey = benign, red = serious).
  Color _getErrorColor(BiometricResult result) {
    switch (result) {
      case BiometricResult.cancelled:
        return Colors.grey;
      case BiometricResult.failed:
        return Colors.orange;
      case BiometricResult.lockedOut:
      case BiometricResult.permanentlyLockedOut:
        return Colors.red;
      case BiometricResult.notAvailable:
      case BiometricResult.notEnrolled:
        return Colors.amber;
      default:
        return Colors.red;
    }
  }

  /// Icon matching the error banner's result category.
  IconData _getErrorIcon(BiometricResult result) {
    switch (result) {
      case BiometricResult.cancelled:
        return Icons.cancel_outlined;
      case BiometricResult.failed:
        return Icons.error_outline;
      case BiometricResult.lockedOut:
      case BiometricResult.permanentlyLockedOut:
        return Icons.lock_clock;
      case BiometricResult.notAvailable:
        return Icons.no_encryption;
      case BiometricResult.notEnrolled:
        return Icons.fingerprint;
      default:
        return Icons.warning_amber;
    }
  }
}

View File

@ -1,5 +1,6 @@
import 'package:flutter/material.dart';
import '../../core/theme.dart';
import 'voice_input_widget.dart';
/// Fancy input screen with OCR, voice, and camera capabilities
class InputScreen extends StatefulWidget {
@ -11,6 +12,8 @@ class InputScreen extends StatefulWidget {
class _InputScreenState extends State<InputScreen> {
final TextEditingController _textController = TextEditingController();
bool _showVoiceInput = false;
bool _continuousDictation = false;
@override
void dispose() {
@ -24,6 +27,22 @@ class _InputScreenState extends State<InputScreen> {
appBar: AppBar(
title: const Text('Input'),
centerTitle: true,
actions: [
// Continuous dictation toggle
if (_showVoiceInput)
IconButton(
icon: Icon(
_continuousDictation ? Icons.repeat_on : Icons.repeat,
color: _continuousDictation
? AppTheme.primaryColor
: Colors.grey,
),
onPressed: () {
setState(() => _continuousDictation = !_continuousDictation);
},
tooltip: 'Continuous dictation',
),
],
),
body: Padding(
padding: const EdgeInsets.all(16.0),
@ -59,6 +78,12 @@ class _InputScreenState extends State<InputScreen> {
const SizedBox(height: 16),
// Voice input overlay
if (_showVoiceInput) ...[
_buildVoiceInputSection(),
const SizedBox(height: 16),
],
// Action buttons row
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
@ -74,9 +99,10 @@ class _InputScreenState extends State<InputScreen> {
onTap: _onOcrTap,
),
_buildActionButton(
icon: Icons.mic,
label: 'Voice',
icon: _showVoiceInput ? Icons.keyboard : Icons.mic,
label: _showVoiceInput ? 'Keyboard' : 'Voice',
onTap: _onVoiceTap,
isActive: _showVoiceInput,
),
_buildActionButton(
icon: Icons.send,
@ -92,31 +118,87 @@ class _InputScreenState extends State<InputScreen> {
);
}
/// Builds the inline voice-input panel shown above the action buttons.
///
/// Partial transcripts overwrite the main text field as they stream in;
/// final results either replace the field or, in continuous-dictation
/// mode, are appended to the existing text.
Widget _buildVoiceInputSection() {
  return AnimatedContainer(
    duration: const Duration(milliseconds: 300),
    curve: Curves.easeInOut,
    padding: const EdgeInsets.all(16),
    decoration: BoxDecoration(
      color: AppTheme.surfaceColor,
      borderRadius: BorderRadius.circular(16),
      border: Border.all(
        color: AppTheme.primaryColor.withOpacity(0.3),
      ),
    ),
    child: VoiceInputWidget(
      continuousMode: _continuousDictation,
      showTranscript: false, // We show in main text field
      buttonSize: 64,
      onTranscript: (text) {
        // Update text field with partial results and keep the caret
        // at the end of the replaced text.
        _textController.text = text;
        _textController.selection = TextSelection.fromPosition(
          TextPosition(offset: text.length),
        );
      },
      onFinalResult: (text) {
        // Insert at cursor position for continuous mode
        if (_continuousDictation && _textController.text.isNotEmpty) {
          final currentText = _textController.text;
          // Join with a single space unless one side already has one.
          if (!currentText.endsWith(' ') && !text.startsWith(' ')) {
            _textController.text = '$currentText $text';
          } else {
            _textController.text = currentText + text;
          }
        } else {
          _textController.text = text;
        }
        // Move the caret to the end of the (possibly appended) text.
        _textController.selection = TextSelection.fromPosition(
          TextPosition(offset: _textController.text.length),
        );
      },
    ),
  );
}
Widget _buildActionButton({
required IconData icon,
required String label,
required VoidCallback onTap,
bool isPrimary = false,
bool isActive = false,
}) {
return InkWell(
onTap: onTap,
borderRadius: BorderRadius.circular(12),
child: Container(
child: AnimatedContainer(
duration: const Duration(milliseconds: 200),
padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 12),
decoration: BoxDecoration(
color: isPrimary ? AppTheme.primaryColor : AppTheme.surfaceColor,
color: isPrimary
? AppTheme.primaryColor
: isActive
? AppTheme.primaryColor.withOpacity(0.2)
: AppTheme.surfaceColor,
borderRadius: BorderRadius.circular(12),
border: isActive
? Border.all(color: AppTheme.primaryColor, width: 2)
: null,
),
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
Icon(icon, color: AppTheme.textColor),
Icon(
icon,
color: isActive ? AppTheme.primaryColor : AppTheme.textColor,
),
const SizedBox(height: 4),
Text(
label,
style: const TextStyle(
color: AppTheme.textColor,
style: TextStyle(
color: isActive ? AppTheme.primaryColor : AppTheme.textColor,
fontSize: 12,
fontWeight: isActive ? FontWeight.w600 : FontWeight.normal,
),
),
],
@ -140,10 +222,7 @@ class _InputScreenState extends State<InputScreen> {
}
void _onVoiceTap() {
// TODO: Implement voice input
ScaffoldMessenger.of(context).showSnackBar(
const SnackBar(content: Text('Voice input - Coming soon')),
);
setState(() => _showVoiceInput = !_showVoiceInput);
}
void _onSendTap() {

View File

@ -0,0 +1,722 @@
import 'dart:async';
import 'dart:io';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:permission_handler/permission_handler.dart';
import '../../core/theme.dart';
import '../../services/ocr_service.dart';
/// Result returned from OCR capture screen.
class OcrCaptureResult {
  /// Full extracted text from the captured image.
  final String text;
  /// Individual text blocks (with bounding boxes) as reported by OCR.
  final List<OcrTextBlock> blocks;
  /// Path to the captured image; null unless the caller asked to keep it
  /// (see `OcrCaptureScreen.keepImage`).
  final String? imagePath;
  OcrCaptureResult({
    required this.text,
    required this.blocks,
    this.imagePath,
  });
}
/// OCR Camera Capture Screen
///
/// Features:
/// - Camera preview with text detection overlay
/// - Live preview scanning (optional)
/// - Photo capture with OCR
/// - Confirm/retake flow
/// - Returns extracted text to caller
///
/// Pops with an [OcrCaptureResult] when the user confirms, or with no
/// result if they close the screen.
class OcrCaptureScreen extends StatefulWidget {
  /// Enable live preview scanning (shows detected text as you scan)
  final bool enableLivePreview;
  /// Keep the captured image file (otherwise deleted after OCR)
  final bool keepImage;
  const OcrCaptureScreen({
    super.key,
    this.enableLivePreview = true,
    this.keepImage = false,
  });
  @override
  State<OcrCaptureScreen> createState() => _OcrCaptureScreenState();
}
class _OcrCaptureScreenState extends State<OcrCaptureScreen>
    with WidgetsBindingObserver {
  CameraController? _cameraController;
  List<CameraDescription> _cameras = [];
  bool _isInitializing = true;
  bool _isCapturing = false;
  bool _hasPermission = false;
  String? _errorMessage;

  // OCR
  final OcrService _ocrService = OcrService();
  // Most recent OCR result from live preview frames.
  OcrResult? _liveResult;
  Timer? _liveProcessingTimer;
  // Prevents overlapping live-frame OCR passes.
  bool _isLiveProcessing = false;

  // Captured state
  String? _capturedImagePath;
  OcrResult? _capturedResult;
  bool _isProcessingCapture = false;

  @override
  void initState() {
    super.initState();
    WidgetsBinding.instance.addObserver(this);
    _initializeCamera();
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    _stopLiveProcessing();
    _cameraController?.dispose();
    _ocrService.dispose();
    // Clean up temp image if not keeping
    if (!widget.keepImage && _capturedImagePath != null) {
      File(_capturedImagePath!).delete().ignore();
    }
    super.dispose();
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    if (_cameraController == null || !_cameraController!.value.isInitialized) {
      return;
    }
    // Release the camera while backgrounded; re-acquire on resume.
    if (state == AppLifecycleState.inactive) {
      _stopLiveProcessing();
      _cameraController?.dispose();
    } else if (state == AppLifecycleState.resumed) {
      _initializeCamera();
    }
  }

  /// Requests camera permission, picks the back camera, and initializes
  /// the preview (starting live OCR if enabled).
  Future<void> _initializeCamera() async {
    setState(() {
      _isInitializing = true;
      _errorMessage = null;
    });
    // Check camera permission
    final status = await Permission.camera.request();
    // FIX: the screen can be popped while the permission dialog (or any
    // await below) is pending; setState afterwards would throw.
    if (!mounted) return;
    if (!status.isGranted) {
      setState(() {
        _hasPermission = false;
        _isInitializing = false;
        _errorMessage = 'Camera permission is required for OCR scanning';
      });
      return;
    }
    setState(() => _hasPermission = true);
    try {
      _cameras = await availableCameras();
      if (!mounted) return;
      if (_cameras.isEmpty) {
        setState(() {
          _errorMessage = 'No cameras available';
          _isInitializing = false;
        });
        return;
      }
      // Use back camera for document scanning
      final camera = _cameras.firstWhere(
        (c) => c.lensDirection == CameraLensDirection.back,
        orElse: () => _cameras.first,
      );
      _cameraController = CameraController(
        camera,
        ResolutionPreset.high,
        enableAudio: false,
        // Platform-preferred raw formats for the camera plugin.
        imageFormatGroup: Platform.isAndroid
            ? ImageFormatGroup.nv21
            : ImageFormatGroup.bgra8888,
      );
      await _cameraController!.initialize();
      // Start live preview if enabled
      if (widget.enableLivePreview) {
        _startLiveProcessing();
      }
      if (mounted) {
        setState(() => _isInitializing = false);
      }
    } catch (e) {
      // FIX: guard here too — initialize() may fail after the widget
      // has already been disposed.
      if (!mounted) return;
      setState(() {
        _errorMessage = 'Failed to initialize camera: $e';
        _isInitializing = false;
      });
    }
  }

  void _startLiveProcessing() {
    if (!widget.enableLivePreview || _cameraController == null) return;
    // Process frames at ~2fps to avoid overloading
    _liveProcessingTimer = Timer.periodic(
      const Duration(milliseconds: 500),
      (_) => _processLiveFrame(),
    );
  }

  void _stopLiveProcessing() {
    _liveProcessingTimer?.cancel();
    _liveProcessingTimer = null;
  }

  /// Takes a throwaway picture, runs OCR on it, and publishes the result
  /// for the live overlay. Skipped while a previous pass is in flight or
  /// after a capture has been taken.
  Future<void> _processLiveFrame() async {
    if (_isLiveProcessing ||
        _cameraController == null ||
        !_cameraController!.value.isInitialized ||
        _capturedImagePath != null) {
      return;
    }
    _isLiveProcessing = true;
    try {
      // Capture a frame for processing
      final image = await _cameraController!.takePicture();
      final result = await _ocrService.processImageFile(image.path);
      // Clean up temp file
      File(image.path).delete().ignore();
      if (mounted && _capturedImagePath == null) {
        setState(() => _liveResult = result);
      }
    } catch (e) {
      debugPrint('Live OCR error: $e');
    } finally {
      _isLiveProcessing = false;
    }
  }

  /// Takes the real capture, runs OCR, and switches to the preview UI.
  Future<void> _captureAndProcess() async {
    if (_isCapturing || _cameraController == null) return;
    setState(() {
      _isCapturing = true;
      _isProcessingCapture = true;
    });
    _stopLiveProcessing();
    try {
      final image = await _cameraController!.takePicture();
      _capturedImagePath = image.path;
      final result = await _ocrService.processImageFile(image.path);
      // FIX: OCR can take long enough for the user to leave the screen;
      // avoid setState on a disposed widget.
      if (!mounted) return;
      setState(() {
        _capturedResult = result;
        _isCapturing = false;
        _isProcessingCapture = false;
      });
    } catch (e) {
      if (!mounted) return;
      setState(() {
        _errorMessage = 'Failed to capture: $e';
        _isCapturing = false;
        _isProcessingCapture = false;
      });
    }
  }

  /// Discards the capture and returns to the live camera view.
  void _retake() {
    // Clean up captured image
    if (_capturedImagePath != null) {
      File(_capturedImagePath!).delete().ignore();
    }
    setState(() {
      _capturedImagePath = null;
      _capturedResult = null;
      _liveResult = null;
    });
    // Restart live processing
    if (widget.enableLivePreview) {
      _startLiveProcessing();
    }
  }

  /// Pops the screen with the extracted text for the caller.
  void _confirm() {
    if (_capturedResult == null) return;
    Navigator.of(context).pop(OcrCaptureResult(
      text: _capturedResult!.fullText,
      blocks: _capturedResult!.blocks,
      imagePath: widget.keepImage ? _capturedImagePath : null,
    ));
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      backgroundColor: Colors.black,
      appBar: AppBar(
        backgroundColor: Colors.black,
        foregroundColor: Colors.white,
        title: const Text('Scan Text'),
        leading: IconButton(
          icon: const Icon(Icons.close),
          onPressed: () => Navigator.of(context).pop(),
        ),
      ),
      body: _buildBody(),
    );
  }

  /// Routes to the correct UI state: initializing → permission denied →
  /// error → captured preview → live camera.
  Widget _buildBody() {
    if (_isInitializing) {
      return const Center(
        child: Column(
          mainAxisSize: MainAxisSize.min,
          children: [
            CircularProgressIndicator(color: AppTheme.primaryColor),
            SizedBox(height: 16),
            Text(
              'Initializing camera...',
              style: TextStyle(color: Colors.white),
            ),
          ],
        ),
      );
    }
    if (!_hasPermission) {
      return _buildPermissionDenied();
    }
    if (_errorMessage != null) {
      return _buildError();
    }
    if (_capturedImagePath != null) {
      return _buildCapturedPreview();
    }
    return _buildCameraPreview();
  }

  Widget _buildPermissionDenied() {
    return Center(
      child: Padding(
        padding: const EdgeInsets.all(32),
        child: Column(
          mainAxisSize: MainAxisSize.min,
          children: [
            const Icon(
              Icons.camera_alt_outlined,
              size: 64,
              color: Colors.grey,
            ),
            const SizedBox(height: 16),
            const Text(
              'Camera Permission Required',
              style: TextStyle(
                color: Colors.white,
                fontSize: 18,
                fontWeight: FontWeight.bold,
              ),
            ),
            const SizedBox(height: 8),
            const Text(
              'Please grant camera permission to scan text from documents.',
              textAlign: TextAlign.center,
              style: TextStyle(color: Colors.grey),
            ),
            const SizedBox(height: 24),
            ElevatedButton.icon(
              onPressed: () => openAppSettings(),
              icon: const Icon(Icons.settings),
              label: const Text('Open Settings'),
              style: ElevatedButton.styleFrom(
                backgroundColor: AppTheme.primaryColor,
                foregroundColor: Colors.white,
              ),
            ),
          ],
        ),
      ),
    );
  }

  Widget _buildError() {
    return Center(
      child: Padding(
        padding: const EdgeInsets.all(32),
        child: Column(
          mainAxisSize: MainAxisSize.min,
          children: [
            const Icon(Icons.error_outline, size: 64, color: Colors.red),
            const SizedBox(height: 16),
            Text(
              _errorMessage!,
              textAlign: TextAlign.center,
              style: const TextStyle(color: Colors.white),
            ),
            const SizedBox(height: 24),
            ElevatedButton.icon(
              onPressed: _initializeCamera,
              icon: const Icon(Icons.refresh),
              label: const Text('Retry'),
              style: ElevatedButton.styleFrom(
                backgroundColor: AppTheme.primaryColor,
                foregroundColor: Colors.white,
              ),
            ),
          ],
        ),
      ),
    );
  }

  /// Live camera with OCR overlay, scanning guide, and capture button.
  Widget _buildCameraPreview() {
    if (_cameraController == null || !_cameraController!.value.isInitialized) {
      return const SizedBox.shrink();
    }
    return Stack(
      fit: StackFit.expand,
      children: [
        // Camera preview
        Center(
          child: AspectRatio(
            aspectRatio: 1 / _cameraController!.value.aspectRatio,
            child: CameraPreview(_cameraController!),
          ),
        ),
        // Live text overlay
        if (_liveResult != null && _liveResult!.hasText)
          _buildTextOverlay(_liveResult!),
        // Scanning guide
        if (_liveResult == null || !_liveResult!.hasText)
          _buildScanningGuide(),
        // Live text count indicator
        if (_liveResult != null && _liveResult!.hasText)
          Positioned(
            top: 16,
            left: 16,
            right: 16,
            child: Container(
              padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 8),
              decoration: BoxDecoration(
                color: Colors.black54,
                borderRadius: BorderRadius.circular(8),
              ),
              child: Row(
                children: [
                  const Icon(Icons.text_fields, color: Colors.green, size: 20),
                  const SizedBox(width: 8),
                  Text(
                    '${_liveResult!.wordCount} words detected',
                    style: const TextStyle(color: Colors.white),
                  ),
                ],
              ),
            ),
          ),
        // Capture button
        Positioned(
          bottom: 32,
          left: 0,
          right: 0,
          child: Center(
            child: _buildCaptureButton(),
          ),
        ),
        // Hint
        Positioned(
          bottom: 120,
          left: 16,
          right: 16,
          child: Text(
            'Point at text and tap to capture',
            textAlign: TextAlign.center,
            style: TextStyle(
              color: Colors.white.withOpacity(0.8),
              fontSize: 14,
            ),
          ),
        ),
      ],
    );
  }

  /// Draws a highlight rectangle over each detected text block, mapping
  /// image coordinates into the preview's letterboxed coordinate space.
  Widget _buildTextOverlay(OcrResult result) {
    return LayoutBuilder(
      builder: (context, constraints) {
        // Calculate scale factors
        final previewSize = Size(constraints.maxWidth, constraints.maxHeight);
        final imageSize = result.imageSize;
        final scaleX = previewSize.width / imageSize.width;
        final scaleY = previewSize.height / imageSize.height;
        // Uniform "contain" scale, centered with letterbox offsets.
        final scale = scaleX < scaleY ? scaleX : scaleY;
        final offsetX = (previewSize.width - imageSize.width * scale) / 2;
        final offsetY = (previewSize.height - imageSize.height * scale) / 2;
        return Stack(
          children: result.blocks.map((block) {
            final rect = Rect.fromLTRB(
              block.boundingBox.left * scale + offsetX,
              block.boundingBox.top * scale + offsetY,
              block.boundingBox.right * scale + offsetX,
              block.boundingBox.bottom * scale + offsetY,
            );
            return Positioned(
              left: rect.left,
              top: rect.top,
              width: rect.width,
              height: rect.height,
              child: Container(
                decoration: BoxDecoration(
                  border: Border.all(
                    color: AppTheme.primaryColor.withOpacity(0.8),
                    width: 2,
                  ),
                  color: AppTheme.primaryColor.withOpacity(0.1),
                ),
              ),
            );
          }).toList(),
        );
      },
    );
  }

  /// Dashed-style framing box shown until text is detected.
  Widget _buildScanningGuide() {
    return Center(
      child: Container(
        width: 280,
        height: 180,
        decoration: BoxDecoration(
          border: Border.all(
            color: Colors.white.withOpacity(0.5),
            width: 2,
          ),
          borderRadius: BorderRadius.circular(12),
        ),
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            Icon(
              Icons.document_scanner_outlined,
              size: 48,
              color: Colors.white.withOpacity(0.7),
            ),
            const SizedBox(height: 8),
            Text(
              'Position text here',
              style: TextStyle(
                color: Colors.white.withOpacity(0.7),
                fontSize: 14,
              ),
            ),
          ],
        ),
      ),
    );
  }

  /// Shutter-style button; shows a spinner while a capture is in flight.
  Widget _buildCaptureButton() {
    return GestureDetector(
      onTap: _isCapturing ? null : _captureAndProcess,
      child: Container(
        width: 72,
        height: 72,
        decoration: BoxDecoration(
          shape: BoxShape.circle,
          border: Border.all(color: Colors.white, width: 4),
        ),
        child: Container(
          margin: const EdgeInsets.all(4),
          decoration: BoxDecoration(
            shape: BoxShape.circle,
            color: _isCapturing ? Colors.grey : Colors.white,
          ),
          child: _isCapturing
              ? const Center(
                  child: SizedBox(
                    width: 24,
                    height: 24,
                    child: CircularProgressIndicator(
                      strokeWidth: 2,
                      color: AppTheme.primaryColor,
                    ),
                  ),
                )
              : null,
        ),
      ),
    );
  }

  /// Captured-image review: OCR overlay, result banner, text preview,
  /// and retake/confirm actions.
  Widget _buildCapturedPreview() {
    return Stack(
      fit: StackFit.expand,
      children: [
        // Captured image
        if (_capturedImagePath != null)
          Image.file(
            File(_capturedImagePath!),
            fit: BoxFit.contain,
          ),
        // Processing indicator
        if (_isProcessingCapture)
          Container(
            color: Colors.black54,
            child: const Center(
              child: Column(
                mainAxisSize: MainAxisSize.min,
                children: [
                  CircularProgressIndicator(color: AppTheme.primaryColor),
                  SizedBox(height: 16),
                  Text(
                    'Extracting text...',
                    style: TextStyle(color: Colors.white),
                  ),
                ],
              ),
            ),
          ),
        // Text overlay on captured image
        if (_capturedResult != null)
          _buildTextOverlay(_capturedResult!),
        // Result info bar
        if (_capturedResult != null && !_isProcessingCapture)
          Positioned(
            top: 16,
            left: 16,
            right: 16,
            child: Container(
              padding: const EdgeInsets.all(12),
              decoration: BoxDecoration(
                color: _capturedResult!.hasText
                    ? Colors.green.withOpacity(0.9)
                    : Colors.orange.withOpacity(0.9),
                borderRadius: BorderRadius.circular(8),
              ),
              child: Row(
                children: [
                  Icon(
                    _capturedResult!.hasText
                        ? Icons.check_circle
                        : Icons.warning,
                    color: Colors.white,
                  ),
                  const SizedBox(width: 8),
                  Expanded(
                    child: Text(
                      _capturedResult!.hasText
                          ? '${_capturedResult!.wordCount} words extracted'
                          : 'No text detected. Try again?',
                      style: const TextStyle(color: Colors.white),
                    ),
                  ),
                ],
              ),
            ),
          ),
        // Extracted text preview
        if (_capturedResult != null &&
            _capturedResult!.hasText &&
            !_isProcessingCapture)
          Positioned(
            bottom: 120,
            left: 16,
            right: 16,
            child: Container(
              constraints: const BoxConstraints(maxHeight: 200),
              padding: const EdgeInsets.all(12),
              decoration: BoxDecoration(
                color: Colors.black.withOpacity(0.8),
                borderRadius: BorderRadius.circular(12),
              ),
              child: SingleChildScrollView(
                child: Text(
                  _capturedResult!.fullText,
                  style: const TextStyle(
                    color: Colors.white,
                    fontSize: 14,
                    height: 1.4,
                  ),
                ),
              ),
            ),
          ),
        // Action buttons
        if (!_isProcessingCapture)
          Positioned(
            bottom: 32,
            left: 32,
            right: 32,
            child: Row(
              children: [
                // Retake button
                Expanded(
                  child: ElevatedButton.icon(
                    onPressed: _retake,
                    icon: const Icon(Icons.refresh),
                    label: const Text('Retake'),
                    style: ElevatedButton.styleFrom(
                      backgroundColor: AppTheme.surfaceColor,
                      foregroundColor: Colors.white,
                      padding: const EdgeInsets.symmetric(vertical: 16),
                      shape: RoundedRectangleBorder(
                        borderRadius: BorderRadius.circular(12),
                      ),
                    ),
                  ),
                ),
                const SizedBox(width: 16),
                // Confirm button
                Expanded(
                  child: ElevatedButton.icon(
                    onPressed:
                        _capturedResult?.hasText == true ? _confirm : null,
                    icon: const Icon(Icons.check),
                    label: const Text('Use Text'),
                    style: ElevatedButton.styleFrom(
                      backgroundColor: AppTheme.primaryColor,
                      foregroundColor: Colors.white,
                      disabledBackgroundColor: Colors.grey,
                      disabledForegroundColor: Colors.white54,
                      padding: const EdgeInsets.symmetric(vertical: 16),
                      shape: RoundedRectangleBorder(
                        borderRadius: BorderRadius.circular(12),
                      ),
                    ),
                  ),
                ),
              ],
            ),
          ),
      ],
    );
  }
}

View File

@ -0,0 +1,546 @@
import 'dart:async';
import 'dart:math' as math;
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import '../../services/voice_service.dart';
import '../../core/theme.dart';
/// Callback when voice input produces text.
typedef VoiceInputCallback = void Function(String text);

/// Modern voice input widget with animations
///
/// Features:
/// - Animated mic button with pulse effect while listening
/// - Real-time transcript display
/// - Waveform visualization
/// - Tap to start/stop
/// - Optional continuous dictation mode
class VoiceInputWidget extends StatefulWidget {
  /// Called when transcription is available (fires for partial results).
  final VoiceInputCallback? onTranscript;
  /// Called when final result is ready.
  final VoiceInputCallback? onFinalResult;
  /// Enable continuous dictation mode.
  final bool continuousMode;
  /// Language locale (e.g., 'en_US', 'de_DE').
  final String locale;
  /// Show transcript text inside the widget (disable when the host
  /// screen renders the transcript itself).
  final bool showTranscript;
  /// Custom mic button size (logical pixels).
  final double buttonSize;
  /// Compact mode (smaller button, no label).
  final bool compact;
  const VoiceInputWidget({
    super.key,
    this.onTranscript,
    this.onFinalResult,
    this.continuousMode = false,
    this.locale = 'en_US',
    this.showTranscript = true,
    this.buttonSize = 72,
    this.compact = false,
  });
  @override
  State<VoiceInputWidget> createState() => _VoiceInputWidgetState();
}
/// State for [VoiceInputWidget].
///
/// Owns the pulse/wave animations, subscribes to the [VoiceService] streams,
/// and renders the mic button, transcript area, waveform and status/error UI.
class _VoiceInputWidgetState extends State<VoiceInputWidget>
    with TickerProviderStateMixin {
  // NOTE(review): VoiceService() appears to follow the singleton/factory
  // pattern used by other services in this codebase; calling its dispose()
  // from this widget (see [dispose]) may tear down a shared instance —
  // confirm against VoiceService's implementation.
  final VoiceService _voiceService = VoiceService();

  VoiceStatus _status = VoiceStatus.idle;
  String _transcript = '';
  String _errorMessage = '';
  double _soundLevel = 0.0;

  // _pulseController scales the mic button while listening;
  // _waveController drives the waveform painter's phase.
  late AnimationController _pulseController;
  late AnimationController _waveController;
  late Animation<double> _pulseAnimation;

  StreamSubscription<VoiceStatus>? _statusSubscription;
  StreamSubscription<double>? _soundLevelSubscription;
  // BUG FIX: removed `_transcriptSubscription` — it was declared and cancelled
  // but never assigned (transcripts arrive via the onResult callback instead).

  @override
  void initState() {
    super.initState();
    // Pulse animation for the mic button
    _pulseController = AnimationController(
      duration: const Duration(milliseconds: 1000),
      vsync: this,
    );
    _pulseAnimation = Tween<double>(begin: 1.0, end: 1.15).animate(
      CurvedAnimation(parent: _pulseController, curve: Curves.easeInOut),
    );
    // Wave animation
    _waveController = AnimationController(
      duration: const Duration(milliseconds: 2000),
      vsync: this,
    );
    _initVoiceService();
  }

  /// Wires service callbacks/streams and initializes the voice service.
  ///
  /// Every UI mutation here is guarded with [mounted]: the callbacks are
  /// registered on a long-lived service and [VoiceService.initialize] is an
  /// async gap, so any of them can fire after this State has been disposed.
  Future<void> _initVoiceService() async {
    // Set up callbacks
    _voiceService.onResult = (text, isFinal) {
      if (!mounted) return; // BUG FIX: callback may outlive this widget
      setState(() => _transcript = text);
      widget.onTranscript?.call(text);
      if (isFinal) {
        widget.onFinalResult?.call(text);
      }
    };
    _voiceService.onError = (error, message) {
      if (!mounted) return; // BUG FIX: callback may outlive this widget
      setState(() {
        _errorMessage = message;
        _status = VoiceStatus.error;
      });
      _stopAnimations();
      // Auto-clear error after 3 seconds
      Future.delayed(const Duration(seconds: 3), () {
        if (mounted && _status == VoiceStatus.error) {
          setState(() {
            _errorMessage = '';
            _status = VoiceStatus.ready;
          });
        }
      });
    };
    // Subscribe to streams (cancelled in dispose; guard anyway in case an
    // event is already in flight when the widget goes away).
    _statusSubscription = _voiceService.statusStream.listen((status) {
      if (!mounted) return;
      setState(() => _status = status);
      if (status == VoiceStatus.listening) {
        _startAnimations();
      } else {
        _stopAnimations();
      }
    });
    _soundLevelSubscription = _voiceService.soundLevelStream.listen((level) {
      if (!mounted) return;
      setState(() => _soundLevel = level);
    });
    // Initialize service
    await _voiceService.initialize();
    // BUG FIX: setState after an await must be guarded — the widget may have
    // been disposed while initialize() was running.
    if (!mounted) return;
    setState(() => _status = _voiceService.status);
  }

  /// Starts the pulse/wave animations and gives light haptic feedback.
  void _startAnimations() {
    _pulseController.repeat(reverse: true);
    _waveController.repeat();
    HapticFeedback.lightImpact();
  }

  /// Stops and rewinds both animations to their idle positions.
  void _stopAnimations() {
    _pulseController.stop();
    _pulseController.reset();
    _waveController.stop();
    _waveController.reset();
  }

  /// Toggles listening: stops if active, otherwise clears state and starts
  /// a new session with the widget's locale/continuous-mode settings.
  Future<void> _toggleListening() async {
    HapticFeedback.mediumImpact();
    if (_voiceService.isListening) {
      await _voiceService.stopListening();
    } else {
      setState(() {
        _transcript = '';
        _errorMessage = '';
      });
      await _voiceService.startListening(
        localeId: widget.locale,
        continuous: widget.continuousMode,
      );
    }
  }

  @override
  void dispose() {
    // Cancel subscriptions BEFORE disposing the controllers they may drive,
    // so a late stream event cannot touch a disposed AnimationController.
    _statusSubscription?.cancel();
    _soundLevelSubscription?.cancel();
    _pulseController.dispose();
    _waveController.dispose();
    // NOTE(review): if VoiceService is a shared singleton, disposing it here
    // affects other users — confirm.
    _voiceService.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    if (widget.compact) {
      return _buildCompactButton();
    }
    return Column(
      mainAxisSize: MainAxisSize.min,
      children: [
        // Transcript display
        if (widget.showTranscript) ...[
          _buildTranscriptArea(),
          const SizedBox(height: 16),
        ],
        // Waveform visualization
        if (_status == VoiceStatus.listening) ...[
          _buildWaveform(),
          const SizedBox(height: 16),
        ],
        // Mic button with animations
        _buildMicButton(),
        // Status label
        const SizedBox(height: 8),
        _buildStatusLabel(),
        // Error message
        if (_errorMessage.isNotEmpty) ...[
          const SizedBox(height: 8),
          _buildErrorMessage(),
        ],
      ],
    );
  }

  /// Compact mode renders only the mic button (no transcript/status chrome).
  Widget _buildCompactButton() {
    return _buildMicButton();
  }

  /// Rounded transcript box with a highlight border while listening and a
  /// copy-to-clipboard action once text is present.
  Widget _buildTranscriptArea() {
    final isListening = _status == VoiceStatus.listening;
    return AnimatedContainer(
      duration: const Duration(milliseconds: 200),
      padding: const EdgeInsets.all(16),
      decoration: BoxDecoration(
        color: AppTheme.surfaceColor,
        borderRadius: BorderRadius.circular(16),
        border: Border.all(
          color: isListening
              ? AppTheme.primaryColor.withOpacity(0.5)
              : Colors.transparent,
          width: 2,
        ),
      ),
      child: Row(
        children: [
          Expanded(
            child: Text(
              _transcript.isEmpty
                  ? (isListening ? 'Listening...' : 'Tap mic to speak')
                  : _transcript,
              style: TextStyle(
                color: _transcript.isEmpty
                    ? Colors.grey
                    : AppTheme.textColor,
                fontSize: 16,
                fontStyle: _transcript.isEmpty
                    ? FontStyle.italic
                    : FontStyle.normal,
              ),
            ),
          ),
          if (_transcript.isNotEmpty)
            IconButton(
              icon: const Icon(Icons.copy, size: 20),
              onPressed: () {
                Clipboard.setData(ClipboardData(text: _transcript));
                ScaffoldMessenger.of(context).showSnackBar(
                  const SnackBar(
                    content: Text('Copied to clipboard'),
                    duration: Duration(seconds: 1),
                  ),
                );
              },
              color: Colors.grey,
              tooltip: 'Copy',
            ),
        ],
      ),
    );
  }

  /// Animated waveform strip driven by [_waveController] and [_soundLevel].
  Widget _buildWaveform() {
    return AnimatedBuilder(
      animation: _waveController,
      builder: (context, child) {
        return CustomPaint(
          size: Size(MediaQuery.of(context).size.width - 64, 48),
          painter: WaveformPainter(
            soundLevel: _soundLevel,
            animationValue: _waveController.value,
            color: AppTheme.primaryColor,
          ),
        );
      },
    );
  }

  /// The main mic button: pulses while listening, shows a spinner while
  /// initializing/processing, and turns red on error.
  Widget _buildMicButton() {
    final isListening = _status == VoiceStatus.listening;
    final isProcessing = _status == VoiceStatus.processing;
    final isError = _status == VoiceStatus.error;
    final isInitializing = _status == VoiceStatus.initializing;
    return GestureDetector(
      // Taps are ignored until the service has finished initializing.
      onTap: isInitializing ? null : _toggleListening,
      child: AnimatedBuilder(
        animation: _pulseAnimation,
        builder: (context, child) {
          final scale = isListening ? _pulseAnimation.value : 1.0;
          return Transform.scale(
            scale: scale,
            child: Stack(
              alignment: Alignment.center,
              children: [
                // Outer pulse rings (only when listening)
                if (isListening) ...[
                  _buildPulseRing(widget.buttonSize * 1.6, 0.1),
                  _buildPulseRing(widget.buttonSize * 1.3, 0.2),
                ],
                // Main button
                Container(
                  width: widget.buttonSize,
                  height: widget.buttonSize,
                  decoration: BoxDecoration(
                    shape: BoxShape.circle,
                    gradient: LinearGradient(
                      begin: Alignment.topLeft,
                      end: Alignment.bottomRight,
                      colors: isError
                          ? [Colors.red.shade400, Colors.red.shade700]
                          : isListening
                              ? [AppTheme.primaryColor, AppTheme.secondaryColor]
                              : [
                                  AppTheme.surfaceColor,
                                  AppTheme.surfaceColor.withOpacity(0.8),
                                ],
                    ),
                    boxShadow: [
                      BoxShadow(
                        color: isListening
                            ? AppTheme.primaryColor.withOpacity(0.4)
                            : Colors.black.withOpacity(0.2),
                        blurRadius: isListening ? 20 : 10,
                        spreadRadius: isListening ? 2 : 0,
                      ),
                    ],
                  ),
                  child: Center(
                    child: isInitializing || isProcessing
                        ? SizedBox(
                            width: widget.buttonSize * 0.4,
                            height: widget.buttonSize * 0.4,
                            child: CircularProgressIndicator(
                              strokeWidth: 3,
                              valueColor: AlwaysStoppedAnimation<Color>(
                                isProcessing ? AppTheme.primaryColor : Colors.white,
                              ),
                            ),
                          )
                        : Icon(
                            isListening ? Icons.stop : Icons.mic,
                            color: isListening || isError
                                ? Colors.white
                                : AppTheme.primaryColor,
                            size: widget.buttonSize * 0.45,
                          ),
                  ),
                ),
              ],
            ),
          );
        },
      ),
    );
  }

  /// One expanding/fading ring around the mic button while listening.
  Widget _buildPulseRing(double size, double opacity) {
    return AnimatedBuilder(
      animation: _pulseController,
      builder: (context, child) {
        return Container(
          width: size * _pulseAnimation.value,
          height: size * _pulseAnimation.value,
          decoration: BoxDecoration(
            shape: BoxShape.circle,
            border: Border.all(
              // Ring fades out as the pulse expands.
              color: AppTheme.primaryColor.withOpacity(opacity * (1 - _pulseController.value)),
              width: 2,
            ),
          ),
        );
      },
    );
  }

  /// Short status line under the mic button, colored per state.
  Widget _buildStatusLabel() {
    String label;
    Color color;
    switch (_status) {
      // idle deliberately shares the initializing label: the service starts
      // initializing immediately in initState.
      case VoiceStatus.idle:
      case VoiceStatus.initializing:
        label = 'Initializing...';
        color = Colors.grey;
        break;
      case VoiceStatus.ready:
        label = 'Tap to speak';
        color = Colors.grey;
        break;
      case VoiceStatus.listening:
        label = widget.continuousMode ? 'Listening (continuous)' : 'Listening...';
        color = AppTheme.primaryColor;
        break;
      case VoiceStatus.processing:
        label = 'Processing...';
        color = AppTheme.secondaryColor;
        break;
      case VoiceStatus.error:
        label = 'Error';
        color = Colors.red;
        break;
    }
    return AnimatedDefaultTextStyle(
      duration: const Duration(milliseconds: 200),
      style: TextStyle(
        color: color,
        fontSize: 14,
        fontWeight: _status == VoiceStatus.listening
            ? FontWeight.w600
            : FontWeight.normal,
      ),
      child: Text(label),
    );
  }

  /// Red-tinted error banner; cleared automatically by the onError handler.
  Widget _buildErrorMessage() {
    return Container(
      padding: const EdgeInsets.symmetric(horizontal: 16, vertical: 8),
      decoration: BoxDecoration(
        color: Colors.red.withOpacity(0.1),
        borderRadius: BorderRadius.circular(8),
      ),
      child: Text(
        _errorMessage,
        style: const TextStyle(
          color: Colors.red,
          fontSize: 12,
        ),
        textAlign: TextAlign.center,
      ),
    );
  }
}
/// Custom painter for waveform visualization.
///
/// Draws [_barCount] vertical bars whose heights follow a sine wave phased by
/// [animationValue] (0..1 from the repeating wave controller) and scaled by
/// [soundLevel].
class WaveformPainter extends CustomPainter {
  /// Microphone sound level.
  // NOTE(review): speech_to_text reports sound levels on a dB-like scale that
  // can be negative or exceed 1.0 — confirm whether VoiceService normalizes
  // before emitting. The clamp below keeps rendering safe either way.
  final double soundLevel;

  /// Animation phase in [0, 1].
  final double animationValue;

  /// Base bar color.
  final Color color;

  WaveformPainter({
    required this.soundLevel,
    required this.animationValue,
    required this.color,
  });

  static const int _barCount = 32;

  @override
  void paint(Canvas canvas, Size size) {
    final paint = Paint()
      ..color = color.withOpacity(0.8)
      ..strokeWidth = 3
      ..strokeCap = StrokeCap.round
      ..style = PaintingStyle.stroke;
    final barWidth = size.width / (_barCount * 2);
    final centerY = size.height / 2;
    // BUG FIX: clamp intensity to [0, 1]. Unclamped sound levels made
    // `withOpacity(0.4 + intensity * 0.6)` violate its 0.0–1.0 assertion
    // (crash) and could produce negative bar heights.
    final intensity = (soundLevel * 0.7 + 0.3).clamp(0.0, 1.0);
    for (int i = 0; i < _barCount; i++) {
      // Sine wave across the bars, phase-shifted by the animation value.
      final progress = i / _barCount;
      final wave = math.sin((progress * math.pi * 4) + (animationValue * math.pi * 2));
      // Minimum 2px so silent bars remain visible.
      final height = (wave * intensity * size.height * 0.4).abs() + 2;
      final x = (i * 2 + 1) * barWidth;
      canvas.drawLine(
        Offset(x, centerY - height / 2),
        Offset(x, centerY + height / 2),
        paint..color = color.withOpacity(0.4 + intensity * 0.6),
      );
    }
  }

  @override
  bool shouldRepaint(WaveformPainter oldDelegate) {
    // Repaint only when the inputs that affect geometry/opacity change.
    return oldDelegate.soundLevel != soundLevel ||
        oldDelegate.animationValue != animationValue;
  }
}
/// Standalone mic button for inline use.
///
/// A lightweight, stateless alternative to [VoiceInputWidget]: the caller
/// owns the [voiceService], drives [isListening], and handles [onTap].
class VoiceMicButton extends StatelessWidget {
  final VoiceService voiceService;
  final VoidCallback? onTap;
  final double size;
  final bool isListening;

  const VoiceMicButton({
    super.key,
    required this.voiceService,
    this.onTap,
    this.size = 48,
    this.isListening = false,
  });

  @override
  Widget build(BuildContext context) {
    // Glow ring only while actively listening.
    final List<BoxShadow>? glow = isListening
        ? [
            BoxShadow(
              color: AppTheme.primaryColor.withOpacity(0.4),
              blurRadius: 12,
              spreadRadius: 2,
            ),
          ]
        : null;
    final icon = Icon(
      isListening ? Icons.stop : Icons.mic,
      color: isListening ? Colors.white : AppTheme.primaryColor,
      size: size * 0.5,
    );
    return GestureDetector(
      onTap: onTap,
      child: AnimatedContainer(
        duration: const Duration(milliseconds: 200),
        width: size,
        height: size,
        decoration: BoxDecoration(
          shape: BoxShape.circle,
          color: isListening ? AppTheme.primaryColor : AppTheme.surfaceColor,
          boxShadow: glow,
        ),
        child: icon,
      ),
    );
  }
}

View File

@ -1,8 +1,8 @@
import 'package:flutter/material.dart';
import '../../core/theme.dart';
import '../../core/auth.dart';
import '../../services/biometric_service.dart';
/// Settings screen
/// Settings screen with biometric authentication configuration
class SettingsScreen extends StatefulWidget {
const SettingsScreen({super.key});
@ -11,21 +11,103 @@ class SettingsScreen extends StatefulWidget {
}
class _SettingsScreenState extends State<SettingsScreen> {
final AuthService _authService = AuthService();
final BiometricService _biometricService = BiometricService();
bool _biometricsEnabled = false;
bool _biometricsAvailable = false;
LockPolicy _lockPolicy = LockPolicy.afterInactive;
String _biometricTypeName = 'Biometrics';
bool _isLoading = true;
@override
void initState() {
super.initState();
_checkBiometrics();
_loadSettings();
}
Future<void> _checkBiometrics() async {
final available = await _authService.isBiometricsAvailable();
setState(() {
_biometricsAvailable = available;
});
Future<void> _loadSettings() async {
final available = await _biometricService.isBiometricsAvailable();
final enabled = await _biometricService.isBiometricEnabled();
final policy = await _biometricService.getLockPolicy();
final typeName = await _biometricService.getBiometricTypeName();
if (mounted) {
setState(() {
_biometricsAvailable = available;
_biometricsEnabled = enabled;
_lockPolicy = policy;
_biometricTypeName = typeName;
_isLoading = false;
});
}
}
Future<void> _toggleBiometrics(bool value) async {
if (value) {
// Verify biometrics work before enabling
final result = await _biometricService.authenticate(
reason: 'Verify biometrics to enable',
);
if (result != BiometricResult.success) {
if (mounted) {
_showError(_biometricService.getErrorMessage(result));
}
return;
}
}
await _biometricService.setBiometricEnabled(value);
if (mounted) {
setState(() {
_biometricsEnabled = value;
});
}
}
Future<void> _changeLockPolicy(LockPolicy? policy) async {
if (policy == null) return;
await _biometricService.setLockPolicy(policy);
if (mounted) {
setState(() {
_lockPolicy = policy;
});
}
}
void _showError(String message) {
ScaffoldMessenger.of(context).showSnackBar(
SnackBar(
content: Text(message),
backgroundColor: Colors.red.shade700,
behavior: SnackBarBehavior.floating,
),
);
}
String _policyDisplayName(LockPolicy policy) {
switch (policy) {
case LockPolicy.always:
return 'Always';
case LockPolicy.afterInactive:
return 'After 5 minutes inactive';
case LockPolicy.never:
return 'Never';
}
}
String _policyDescription(LockPolicy policy) {
switch (policy) {
case LockPolicy.always:
return 'Require authentication every time you open the app';
case LockPolicy.afterInactive:
return 'Require authentication after 5 minutes of inactivity';
case LockPolicy.never:
return 'Never require authentication (not recommended)';
}
}
@override
@ -35,68 +117,175 @@ class _SettingsScreenState extends State<SettingsScreen> {
title: const Text('Settings'),
centerTitle: true,
),
body: ListView(
children: [
_buildSection(
title: 'Security',
children: [
SwitchListTile(
title: const Text('Biometric Authentication'),
subtitle: Text(
_biometricsAvailable
? 'Use fingerprint or face to unlock'
: 'Not available on this device',
body: _isLoading
? const Center(child: CircularProgressIndicator())
: ListView(
children: [
_buildSection(
title: 'Security',
children: [
// Biometric toggle
SwitchListTile(
title: Text('$_biometricTypeName Authentication'),
subtitle: Text(
_biometricsAvailable
? 'Use $_biometricTypeName to unlock the app'
: 'Not available on this device',
),
value: _biometricsEnabled && _biometricsAvailable,
onChanged: _biometricsAvailable ? _toggleBiometrics : null,
activeColor: AppTheme.primaryColor,
secondary: Icon(
_biometricsAvailable
? Icons.fingerprint
: Icons.no_encryption,
color: _biometricsAvailable
? AppTheme.primaryColor
: Colors.grey,
),
),
// Lock policy (only shown if biometrics enabled)
if (_biometricsEnabled && _biometricsAvailable) ...[
const Divider(height: 1),
ListTile(
leading: const Icon(Icons.lock_clock),
title: const Text('Lock Timing'),
subtitle: Text(_policyDisplayName(_lockPolicy)),
trailing: const Icon(Icons.chevron_right),
onTap: () => _showLockPolicyDialog(),
),
],
// Biometrics not enrolled warning
if (!_biometricsAvailable) ...[
const Divider(height: 1),
ListTile(
leading: Icon(
Icons.warning_amber,
color: Colors.orange.shade400,
),
title: const Text('Set Up Biometrics'),
subtitle: const Text(
'Enable Face ID, Touch ID, or fingerprint in your device settings',
),
onTap: () {
_showBiometricsSetupInfo();
},
),
],
],
),
_buildSection(
title: 'Input',
children: [
ListTile(
leading: const Icon(Icons.camera_alt),
title: const Text('Camera Permissions'),
trailing: const Icon(Icons.chevron_right),
onTap: () {
// TODO: Open camera permissions
},
),
ListTile(
leading: const Icon(Icons.mic),
title: const Text('Microphone Permissions'),
trailing: const Icon(Icons.chevron_right),
onTap: () {
// TODO: Open microphone permissions
},
),
],
),
_buildSection(
title: 'About',
children: [
const ListTile(
leading: Icon(Icons.info_outline),
title: Text('Version'),
trailing: Text('1.0.0'),
),
ListTile(
leading: const Icon(Icons.code),
title: const Text('Open Source Licenses'),
trailing: const Icon(Icons.chevron_right),
onTap: () {
showLicensePage(context: context);
},
),
],
),
],
),
);
}
void _showLockPolicyDialog() {
showDialog(
context: context,
builder: (context) => AlertDialog(
backgroundColor: AppTheme.surfaceColor,
title: const Text('Lock Timing'),
content: Column(
mainAxisSize: MainAxisSize.min,
children: LockPolicy.values.map((policy) {
return RadioListTile<LockPolicy>(
title: Text(_policyDisplayName(policy)),
subtitle: Text(
_policyDescription(policy),
style: TextStyle(
fontSize: 12,
color: AppTheme.textColor.withOpacity(0.7),
),
value: _biometricsEnabled && _biometricsAvailable,
onChanged: _biometricsAvailable
? (value) {
setState(() {
_biometricsEnabled = value;
});
}
: null,
activeColor: AppTheme.primaryColor,
),
],
value: policy,
groupValue: _lockPolicy,
activeColor: AppTheme.primaryColor,
onChanged: (value) {
_changeLockPolicy(value);
Navigator.of(context).pop();
},
);
}).toList(),
),
actions: [
TextButton(
onPressed: () => Navigator.of(context).pop(),
child: const Text('Cancel'),
),
_buildSection(
title: 'Input',
children: [
ListTile(
leading: const Icon(Icons.camera_alt),
title: const Text('Camera Permissions'),
trailing: const Icon(Icons.chevron_right),
onTap: () {
// TODO: Open camera permissions
},
),
ListTile(
leading: const Icon(Icons.mic),
title: const Text('Microphone Permissions'),
trailing: const Icon(Icons.chevron_right),
onTap: () {
// TODO: Open microphone permissions
},
),
],
),
_buildSection(
title: 'About',
children: [
const ListTile(
leading: Icon(Icons.info_outline),
title: Text('Version'),
trailing: Text('1.0.0'),
),
ListTile(
leading: const Icon(Icons.code),
title: const Text('Open Source Licenses'),
trailing: const Icon(Icons.chevron_right),
onTap: () {
showLicensePage(context: context);
},
),
],
],
),
);
}
void _showBiometricsSetupInfo() {
showDialog(
context: context,
builder: (context) => AlertDialog(
backgroundColor: AppTheme.surfaceColor,
title: const Text('Set Up Biometrics'),
content: const Column(
mainAxisSize: MainAxisSize.min,
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Text('To use biometric authentication:'),
SizedBox(height: 16),
Text('iOS:'),
Text('Settings → Face ID & Passcode (or Touch ID)'),
SizedBox(height: 12),
Text('Android:'),
Text('Settings → Security → Fingerprint / Face unlock'),
SizedBox(height: 16),
Text(
'After setting up biometrics on your device, return to this app to enable authentication.',
style: TextStyle(fontSize: 12),
),
],
),
actions: [
TextButton(
onPressed: () => Navigator.of(context).pop(),
child: const Text('Got it'),
),
],
),

View File

@ -0,0 +1,299 @@
import 'package:flutter/services.dart';
import 'package:local_auth/local_auth.dart';
import 'package:local_auth/error_codes.dart' as auth_error;
import 'package:shared_preferences/shared_preferences.dart';
/// Biometric authentication result.
enum BiometricResult {
  /// Authentication succeeded.
  success,
  /// Biometric check ran but did not match.
  failed,
  /// User dismissed the prompt.
  cancelled,
  /// No biometric hardware / feature unavailable.
  notAvailable,
  /// Hardware exists but no biometrics (or passcode) are enrolled.
  notEnrolled,
  /// Temporarily locked after too many failed attempts.
  lockedOut,
  /// Locked until the device is unlocked with PIN/password.
  permanentlyLockedOut,
  /// Any other platform or unexpected error.
  error,
}
/// Lock requirement policy: when re-authentication is demanded.
enum LockPolicy {
  /// Require authentication on every app open.
  always,
  /// Require authentication after 5 min of inactivity.
  afterInactive,
  /// Never require authentication (not recommended).
  never,
}
/// Biometric authentication service with session state and preferences.
///
/// Singleton (the factory constructor returns one shared instance). Keeps an
/// in-memory authenticated flag plus a last-activity timestamp, and persists
/// the enabled flag and [LockPolicy] in SharedPreferences.
class BiometricService {
  static final BiometricService _instance = BiometricService._internal();
  factory BiometricService() => _instance;
  BiometricService._internal();

  final LocalAuthentication _localAuth = LocalAuthentication();

  // Session state (in-memory only; lost on app restart).
  bool _isAuthenticated = false;
  DateTime? _lastActivityTime;

  // Constants
  static const String _prefKeyEnabled = 'biometric_enabled';
  static const String _prefKeyPolicy = 'biometric_lock_policy';
  static const Duration _inactivityTimeout = Duration(minutes: 5);

  /// Whether user is currently authenticated in this session
  bool get isAuthenticated => _isAuthenticated;

  /// Mark activity to track inactivity timeout
  void recordActivity() {
    _lastActivityTime = DateTime.now();
  }

  /// Check if device supports any biometrics.
  ///
  /// Swallows plugin errors and reports `false` (best-effort by design).
  Future<bool> isDeviceSupported() async {
    try {
      return await _localAuth.isDeviceSupported();
    } catch (e) {
      return false;
    }
  }

  /// Check if biometrics can be used (hardware exists).
  Future<bool> canCheckBiometrics() async {
    try {
      return await _localAuth.canCheckBiometrics;
    } catch (e) {
      return false;
    }
  }

  /// Check if biometrics are available (supported + enrolled).
  ///
  /// NOTE(review): despite the doc, enrollment is never actually checked here
  /// (`getAvailableBiometrics()` is not consulted) — only hardware capability
  /// and device support. Confirm whether enrollment should be verified too.
  Future<bool> isBiometricsAvailable() async {
    try {
      final canCheck = await _localAuth.canCheckBiometrics;
      final isSupported = await _localAuth.isDeviceSupported();
      return canCheck && isSupported;
    } catch (e) {
      return false;
    }
  }

  /// Get available biometric types on this device.
  ///
  /// Returns an empty list on any plugin error.
  Future<List<BiometricType>> getAvailableBiometrics() async {
    try {
      return await _localAuth.getAvailableBiometrics();
    } catch (e) {
      return [];
    }
  }

  /// Get human-readable biometric type name.
  ///
  /// Priority order: face > fingerprint > iris > strong > weak; falls back to
  /// the generic label 'Biometrics'.
  Future<String> getBiometricTypeName() async {
    final types = await getAvailableBiometrics();
    if (types.contains(BiometricType.face)) {
      return 'Face ID';
    } else if (types.contains(BiometricType.fingerprint)) {
      return 'Fingerprint';
    } else if (types.contains(BiometricType.iris)) {
      return 'Iris';
    } else if (types.contains(BiometricType.strong)) {
      return 'Biometrics';
    } else if (types.contains(BiometricType.weak)) {
      return 'Biometrics';
    }
    return 'Biometrics';
  }

  /// Authenticate with biometrics, with fallback to device PIN/password.
  ///
  /// Short-circuits to [BiometricResult.success] when the current session is
  /// still valid per [_shouldRequireReauth]. On success, records the time as
  /// activity for the inactivity timeout.
  Future<BiometricResult> authenticate({
    String reason = 'Please authenticate to access inou',
    bool biometricOnly = false,
  }) async {
    // Check if already authenticated in session
    if (_isAuthenticated && !_shouldRequireReauth()) {
      return BiometricResult.success;
    }
    // Check availability
    final available = await isBiometricsAvailable();
    if (!available) {
      final canCheck = await canCheckBiometrics();
      // NOTE(review): this mapping looks inverted — `!canCheck` usually means
      // no biometric hardware (notAvailable), while hardware present but
      // unusable suggests nothing is enrolled (notEnrolled). Verify against
      // local_auth's semantics for canCheckBiometrics/isDeviceSupported.
      if (!canCheck) {
        return BiometricResult.notEnrolled;
      }
      return BiometricResult.notAvailable;
    }
    try {
      final success = await _localAuth.authenticate(
        localizedReason: reason,
        options: AuthenticationOptions(
          stickyAuth: true,
          biometricOnly: biometricOnly,
          useErrorDialogs: true,
          sensitiveTransaction: true,
        ),
      );
      if (success) {
        _isAuthenticated = true;
        _lastActivityTime = DateTime.now();
        return BiometricResult.success;
      }
      return BiometricResult.failed;
    } on PlatformException catch (e) {
      return _handlePlatformException(e);
    } catch (e) {
      return BiometricResult.error;
    }
  }

  /// Handle platform-specific errors.
  ///
  /// Maps local_auth error codes to [BiometricResult]; `passcodeNotSet` is
  /// folded into notEnrolled since both mean "set up credentials first".
  BiometricResult _handlePlatformException(PlatformException e) {
    switch (e.code) {
      case auth_error.notAvailable:
        return BiometricResult.notAvailable;
      case auth_error.notEnrolled:
        return BiometricResult.notEnrolled;
      case auth_error.lockedOut:
        return BiometricResult.lockedOut;
      case auth_error.permanentlyLockedOut:
        return BiometricResult.permanentlyLockedOut;
      case auth_error.passcodeNotSet:
        return BiometricResult.notEnrolled;
      default:
        // User cancelled or other error
        // Cancellation has no dedicated error code; detected by message text.
        if (e.message?.toLowerCase().contains('cancel') == true) {
          return BiometricResult.cancelled;
        }
        return BiometricResult.error;
    }
  }

  /// Check if re-authentication is needed based on policy.
  ///
  /// Uses the in-memory policy cache; the cache is only populated by
  /// [getLockPolicy]/[setLockPolicy], so before either has run this
  /// defaults to [LockPolicy.afterInactive].
  bool _shouldRequireReauth() {
    final policy = _currentPolicyCache ?? LockPolicy.afterInactive;
    switch (policy) {
      case LockPolicy.always:
        return true;
      case LockPolicy.never:
        return false;
      case LockPolicy.afterInactive:
        // No recorded activity is treated as "timed out".
        if (_lastActivityTime == null) return true;
        return DateTime.now().difference(_lastActivityTime!) > _inactivityTimeout;
    }
  }

  /// Check if authentication is required (for app resume scenarios).
  ///
  /// Returns false when the feature is disabled or biometrics are
  /// unavailable; otherwise applies the persisted [LockPolicy].
  Future<bool> isAuthenticationRequired() async {
    final enabled = await isBiometricEnabled();
    if (!enabled) return false;
    final available = await isBiometricsAvailable();
    if (!available) return false;
    final policy = await getLockPolicy();
    switch (policy) {
      case LockPolicy.always:
        return true;
      case LockPolicy.never:
        return false;
      case LockPolicy.afterInactive:
        if (!_isAuthenticated) return true;
        if (_lastActivityTime == null) return true;
        return DateTime.now().difference(_lastActivityTime!) > _inactivityTimeout;
    }
  }

  /// Reset authentication state (for logout or app background).
  void resetAuthState() {
    _isAuthenticated = false;
    _lastActivityTime = null;
  }

  /// Cancel any ongoing authentication.
  ///
  /// Errors are deliberately ignored — there may be nothing to cancel.
  Future<void> cancelAuthentication() async {
    try {
      await _localAuth.stopAuthentication();
    } catch (_) {}
  }

  // Preference management

  // In-memory mirror of the persisted policy; read by _shouldRequireReauth.
  LockPolicy? _currentPolicyCache;

  /// Check if biometric authentication is enabled
  Future<bool> isBiometricEnabled() async {
    final prefs = await SharedPreferences.getInstance();
    return prefs.getBool(_prefKeyEnabled) ?? false;
  }

  /// Enable or disable biometric authentication
  Future<void> setBiometricEnabled(bool enabled) async {
    final prefs = await SharedPreferences.getInstance();
    await prefs.setBool(_prefKeyEnabled, enabled);
    if (!enabled) {
      // Intentional: marking the session authenticated means no auth prompt
      // is shown while the feature is off.
      _isAuthenticated = true; // Don't require auth if disabled
    }
  }

  /// Get current lock policy.
  ///
  /// Also refreshes [_currentPolicyCache] as a side effect.
  Future<LockPolicy> getLockPolicy() async {
    final prefs = await SharedPreferences.getInstance();
    final value = prefs.getString(_prefKeyPolicy);
    _currentPolicyCache = _policyFromString(value);
    return _currentPolicyCache!;
  }

  /// Set lock policy (persists and updates the in-memory cache).
  Future<void> setLockPolicy(LockPolicy policy) async {
    final prefs = await SharedPreferences.getInstance();
    await prefs.setString(_prefKeyPolicy, _policyToString(policy));
    _currentPolicyCache = policy;
  }

  // Serializes a policy to its SharedPreferences string form.
  String _policyToString(LockPolicy policy) {
    switch (policy) {
      case LockPolicy.always:
        return 'always';
      case LockPolicy.afterInactive:
        return 'after_inactive';
      case LockPolicy.never:
        return 'never';
    }
  }

  // Parses the persisted string; unknown/null values default to afterInactive.
  LockPolicy _policyFromString(String? value) {
    switch (value) {
      case 'always':
        return LockPolicy.always;
      case 'never':
        return LockPolicy.never;
      case 'after_inactive':
      default:
        return LockPolicy.afterInactive;
    }
  }

  /// Get user-friendly error message for a result
  String getErrorMessage(BiometricResult result) {
    switch (result) {
      case BiometricResult.success:
        return 'Authentication successful';
      case BiometricResult.failed:
        return 'Authentication failed. Please try again.';
      case BiometricResult.cancelled:
        return 'Authentication was cancelled';
      case BiometricResult.notAvailable:
        return 'Biometric authentication is not available on this device';
      case BiometricResult.notEnrolled:
        return 'No biometrics enrolled. Please set up Face ID, Touch ID, or fingerprint in your device settings.';
      case BiometricResult.lockedOut:
        return 'Too many failed attempts. Please try again later or use your device PIN.';
      case BiometricResult.permanentlyLockedOut:
        return 'Biometrics are locked. Please unlock your device using PIN/password first.';
      case BiometricResult.error:
        return 'An error occurred. Please try again.';
    }
  }
}

View File

@ -1,67 +0,0 @@
import 'package:local_auth/local_auth.dart';
/// Biometrics authentication service
class BiometricsService {
final LocalAuthentication _localAuth = LocalAuthentication();
/// Check if device supports biometrics
Future<bool> isSupported() async {
try {
return await _localAuth.isDeviceSupported();
} catch (e) {
return false;
}
}
/// Check if biometrics are enrolled
Future<bool> canCheckBiometrics() async {
try {
return await _localAuth.canCheckBiometrics;
} catch (e) {
return false;
}
}
/// Get available biometric types
Future<List<BiometricType>> getAvailableBiometrics() async {
try {
return await _localAuth.getAvailableBiometrics();
} catch (e) {
return [];
}
}
/// Authenticate with biometrics
Future<BiometricResult> authenticate({
String reason = 'Please authenticate to access inou',
bool biometricOnly = false,
}) async {
try {
final success = await _localAuth.authenticate(
localizedReason: reason,
options: AuthenticationOptions(
stickyAuth: true,
biometricOnly: biometricOnly,
useErrorDialogs: true,
),
);
return success
? BiometricResult.success
: BiometricResult.failed;
} catch (e) {
return BiometricResult.error;
}
}
/// Stop authentication
Future<void> stopAuthentication() async {
await _localAuth.stopAuthentication();
}
}
enum BiometricResult {
success,
failed,
error,
}

View File

@ -1,41 +1,293 @@
import 'dart:async';
import 'dart:io';
import 'dart:ui' as ui;
import 'dart:ui' show Rect, Size;
import 'package:camera/camera.dart';
import 'package:flutter/foundation.dart';
import 'package:google_mlkit_text_recognition/google_mlkit_text_recognition.dart';
/// Structured text block with bounding box info
class OcrTextBlock {
final String text;
final List<OcrTextLine> lines;
final Rect boundingBox;
final List<ui.Offset> cornerPoints;
final String? recognizedLanguage;
OcrTextBlock({
required this.text,
required this.lines,
required this.boundingBox,
required this.cornerPoints,
this.recognizedLanguage,
});
/// Create from ML Kit TextBlock
factory OcrTextBlock.fromMlKit(TextBlock block) {
return OcrTextBlock(
text: block.text,
lines: block.lines.map((l) => OcrTextLine.fromMlKit(l)).toList(),
boundingBox: block.boundingBox,
cornerPoints: block.cornerPoints
.map((p) => ui.Offset(p.x.toDouble(), p.y.toDouble()))
.toList(),
recognizedLanguage: block.recognizedLanguages.isNotEmpty
? block.recognizedLanguages.first.split('-').first
: null,
);
}
}
/// Text line within a block
class OcrTextLine {
final String text;
final List<OcrTextElement> elements;
final Rect boundingBox;
final List<ui.Offset> cornerPoints;
OcrTextLine({
required this.text,
required this.elements,
required this.boundingBox,
required this.cornerPoints,
});
factory OcrTextLine.fromMlKit(TextLine line) {
return OcrTextLine(
text: line.text,
elements: line.elements.map((e) => OcrTextElement.fromMlKit(e)).toList(),
boundingBox: line.boundingBox,
cornerPoints: line.cornerPoints
.map((p) => ui.Offset(p.x.toDouble(), p.y.toDouble()))
.toList(),
);
}
}
/// Individual text element (word)
class OcrTextElement {
final String text;
final Rect boundingBox;
final List<ui.Offset> cornerPoints;
OcrTextElement({
required this.text,
required this.boundingBox,
required this.cornerPoints,
});
factory OcrTextElement.fromMlKit(TextElement element) {
return OcrTextElement(
text: element.text,
boundingBox: element.boundingBox,
cornerPoints: element.cornerPoints
.map((p) => ui.Offset(p.x.toDouble(), p.y.toDouble()))
.toList(),
);
}
}
/// Complete OCR result
class OcrResult {
final String fullText;
final List<OcrTextBlock> blocks;
final Size imageSize;
final DateTime timestamp;
final String? imagePath;
OcrResult({
required this.fullText,
required this.blocks,
required this.imageSize,
required this.timestamp,
this.imagePath,
});
/// Check if any text was found
bool get hasText => fullText.isNotEmpty;
/// Get all recognized languages
Set<String> get languages =>
blocks.where((b) => b.recognizedLanguage != null)
.map((b) => b.recognizedLanguage!)
.toSet();
/// Get total number of words
int get wordCount =>
blocks.expand((b) => b.lines).expand((l) => l.elements).length;
}
/// OCR service using ML Kit Text Recognition
class OcrService {
final TextRecognizer _textRecognizer = TextRecognizer();
/// Process an image and extract text
Future<String> processImage(String imagePath) async {
TextRecognizer? _textRecognizer;
bool _isProcessing = false;
/// Get or create text recognizer
TextRecognizer get textRecognizer {
_textRecognizer ??= TextRecognizer(script: TextRecognitionScript.latin);
return _textRecognizer!;
}
/// Whether OCR processing is currently running
bool get isProcessing => _isProcessing;
/// Process an image file and extract structured text
Future<OcrResult> processImageFile(String imagePath) async {
if (_isProcessing) {
throw OcrException('OCR processing already in progress');
}
_isProcessing = true;
try {
final file = File(imagePath);
if (!await file.exists()) {
throw OcrException('Image file not found: $imagePath');
}
final inputImage = InputImage.fromFilePath(imagePath);
final recognizedText = await _textRecognizer.processImage(inputImage);
return recognizedText.text;
final recognizedText = await textRecognizer.processImage(inputImage);
// Get image dimensions
final bytes = await file.readAsBytes();
final codec = await ui.instantiateImageCodec(bytes);
final frame = await codec.getNextFrame();
final imageSize = Size(
frame.image.width.toDouble(),
frame.image.height.toDouble(),
);
return OcrResult(
fullText: recognizedText.text,
blocks: recognizedText.blocks
.map((b) => OcrTextBlock.fromMlKit(b))
.toList(),
imageSize: imageSize,
timestamp: DateTime.now(),
imagePath: imagePath,
);
} catch (e) {
if (e is OcrException) rethrow;
throw OcrException('Failed to process image: $e');
} finally {
_isProcessing = false;
}
}
/// Get structured text blocks from image
Future<List<TextBlock>> getTextBlocks(String imagePath) async {
/// Process camera image for live preview scanning
Future<OcrResult?> processCameraImage(
CameraImage image,
CameraDescription camera,
int sensorOrientation,
) async {
if (_isProcessing) return null;
_isProcessing = true;
try {
final inputImage = InputImage.fromFilePath(imagePath);
final recognizedText = await _textRecognizer.processImage(inputImage);
return recognizedText.blocks;
final inputImage = _inputImageFromCameraImage(
image,
camera,
sensorOrientation,
);
if (inputImage == null) {
return null;
}
final recognizedText = await textRecognizer.processImage(inputImage);
return OcrResult(
fullText: recognizedText.text,
blocks: recognizedText.blocks
.map((b) => OcrTextBlock.fromMlKit(b))
.toList(),
imageSize: Size(image.width.toDouble(), image.height.toDouble()),
timestamp: DateTime.now(),
);
} catch (e) {
throw OcrException('Failed to get text blocks: $e');
debugPrint('OCR processing error: $e');
return null;
} finally {
_isProcessing = false;
}
}
/// Convert CameraImage to InputImage for ML Kit
InputImage? _inputImageFromCameraImage(
CameraImage image,
CameraDescription camera,
int sensorOrientation,
) {
// Get rotation based on platform
final rotation = _getRotation(camera, sensorOrientation);
if (rotation == null) return null;
// Get image format
final format = InputImageFormatValue.fromRawValue(image.format.raw);
if (format == null ||
(Platform.isAndroid && format != InputImageFormat.nv21) ||
(Platform.isIOS && format != InputImageFormat.bgra8888)) {
return null;
}
// Only single plane supported
if (image.planes.isEmpty) return null;
final plane = image.planes.first;
return InputImage.fromBytes(
bytes: plane.bytes,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation,
format: format,
bytesPerRow: plane.bytesPerRow,
),
);
}
/// Get image rotation for ML Kit
///
/// Returns null on unsupported platforms, which makes the caller skip
/// the frame.
InputImageRotation? _getRotation(
  CameraDescription camera,
  int sensorOrientation,
) {
  if (Platform.isIOS) {
    // iOS frames map directly from the raw sensor orientation.
    return InputImageRotationValue.fromRawValue(sensorOrientation);
  } else if (Platform.isAndroid) {
    // Compensate for camera and device orientation
    var rotationCompensation = sensorOrientation;
    if (camera.lensDirection == CameraLensDirection.front) {
      // NOTE(review): (x + 360) % 360 is a no-op for values in 0..359, so
      // the front camera currently receives no actual compensation. The
      // usual formula also involves the device orientation — TODO confirm
      // intended behavior on front-camera Android devices.
      rotationCompensation = (sensorOrientation + 360) % 360;
    }
    return InputImageRotationValue.fromRawValue(rotationCompensation);
  }
  return null;
}
/// Extract just the text from an image (simple API)
///
/// Convenience wrapper over [processImageFile] that discards block and
/// layout information.
Future<String> extractText(String imagePath) async =>
    (await processImageFile(imagePath)).fullText;
/// Get all text blocks from an image
///
/// Convenience wrapper over [processImageFile] that returns only the
/// structured blocks.
Future<List<OcrTextBlock>> getTextBlocks(String imagePath) async =>
    (await processImageFile(imagePath)).blocks;
/// Cleanup resources
///
/// Closes the recognizer and drops the reference so a disposed service
/// cannot be reused accidentally. (Resolves duplicated/conflicting close
/// calls left over from a merge; keeps the nullable-field variant used by
/// the newer code.)
void dispose() {
  _textRecognizer?.close();
  _textRecognizer = null;
}
}
/// Exception thrown when OCR processing fails.
///
/// Wraps the underlying failure in a stable, loggable message.
class OcrException implements Exception {
  /// Human-readable description of the failure.
  final String message;

  /// Creates an [OcrException] with the given [message].
  ///
  /// `const` so exception values can be canonicalized when the message
  /// is a compile-time constant.
  const OcrException(this.message);

  @override
  String toString() => 'OcrException: $message';
}

View File

@ -1,22 +1,76 @@
import 'dart:async';
import 'package:speech_to_text/speech_to_text.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:permission_handler/permission_handler.dart';
/// Voice input error types
/// Voice input error types
enum VoiceError {
  /// Audio/microphone failure (no usable input device).
  noMicrophone,
  /// Microphone permission was denied (possibly permanently).
  permissionDenied,
  /// Speech recognition is not available on this device.
  notAvailable,
  /// The engine could not recognize any speech.
  recognitionFailed,
  /// Listening timed out before speech was captured.
  timeout,
  /// Any other, unclassified failure.
  unknown,
}
/// Voice service status
/// Voice service status
enum VoiceStatus {
  /// Not yet initialized.
  idle,
  /// Initialization in progress.
  initializing,
  /// Initialized and ready to start listening.
  ready,
  /// Actively capturing speech.
  listening,
  /// Finalizing a result after listening ended.
  processing,
  /// Initialization or recognition failed.
  error,
}
/// Callback types for voice events

/// Receives a transcript; [isFinal] is true only for the final result.
typedef VoiceResultCallback = void Function(String text, bool isFinal);

/// Receives service status transitions.
typedef VoiceStatusCallback = void Function(VoiceStatus status);

/// Receives a classified error plus a user-displayable message.
typedef VoiceErrorCallback = void Function(VoiceError error, String message);

/// Receives the normalized sound level (0.0 to 1.0).
typedef VoiceSoundLevelCallback = void Function(double level);
/// Voice input service using Speech to Text
///
/// Features:
/// - Speech recognition with partial results
/// - Continuous dictation mode
/// - Language selection
/// - Proper error handling
/// - Permission management
class VoiceService {
// Underlying speech_to_text engine instance.
// (Removes merge residue: a duplicate `_isInitialized` declaration and a
// superseded `initialize()` that logged via print; the current
// declarations live immediately below.)
final SpeechToText _speechToText = SpeechToText();
// True once _speechToText.initialize has succeeded.
bool _isInitialized = false;
// Last status published via _setStatus.
VoiceStatus _status = VoiceStatus.idle;
// Locale id used for recognition (e.g. 'en_US').
String _currentLocale = 'en_US';
// When true, listening is restarted automatically after final results.
bool _continuousMode = false;

// Callbacks (optional; the streams below mirror the same events)
VoiceResultCallback? onResult;
VoiceStatusCallback? onStatusChange;
VoiceErrorCallback? onError;
VoiceSoundLevelCallback? onSoundLevel;

// Stream controllers for reactive updates (broadcast: multiple listeners)
final StreamController<VoiceStatus> _statusController =
    StreamController<VoiceStatus>.broadcast();
final StreamController<String> _transcriptController =
    StreamController<String>.broadcast();
final StreamController<double> _soundLevelController =
    StreamController<double>.broadcast();

/// Status stream
Stream<VoiceStatus> get statusStream => _statusController.stream;

/// Transcript stream (partial and final results)
Stream<String> get transcriptStream => _transcriptController.stream;

/// Sound level stream (0.0 to 1.0)
Stream<double> get soundLevelStream => _soundLevelController.stream;

/// Current status
VoiceStatus get status => _status;

/// Check if speech recognition is available
bool get isAvailable => _isInitialized && _speechToText.isAvailable;
@ -24,32 +78,138 @@ class VoiceService {
/// Check if currently listening
bool get isListening => _speechToText.isListening;

/// Current locale
String get currentLocale => _currentLocale;

/// Continuous mode enabled
bool get isContinuousMode => _continuousMode;

/// Set continuous mode
set continuousMode(bool value) => _continuousMode = value;
/// Check microphone permission
///
/// Returns true only when permission is currently granted; never prompts
/// the user (see [requestPermission] for that).
Future<bool> checkPermission() async =>
    (await Permission.microphone.status).isGranted;
/// Request microphone permission
///
/// Prompts the user if needed. On denial, emits a
/// [VoiceError.permissionDenied] with a message that distinguishes a
/// permanent denial (user must go to settings) from a simple one.
Future<bool> requestPermission() async {
  final status = await Permission.microphone.request();
  if (status.isGranted) {
    return true;
  }
  final message = status.isPermanentlyDenied
      ? 'Microphone permission permanently denied. Please enable in settings.'
      : 'Microphone permission denied.';
  _emitError(VoiceError.permissionDenied, message);
  return false;
}
/// Initialize the speech recognition
///
/// Requests microphone permission, then initializes the engine. Returns
/// true on success; on failure emits an error and moves to
/// [VoiceStatus.error]. Safe to call repeatedly — returns immediately
/// once initialized.
Future<bool> initialize() async {
  if (_isInitialized) return true;
  _setStatus(VoiceStatus.initializing);

  // Check permission first (requestPermission emits its own error).
  final hasPermission = await requestPermission();
  if (!hasPermission) {
    _setStatus(VoiceStatus.error);
    return false;
  }

  try {
    _isInitialized = await _speechToText.initialize(
      onError: _handleError,
      onStatus: _handleStatus,
      debugLogging: false,
    );
    if (!_isInitialized) {
      _emitError(VoiceError.notAvailable,
          'Speech recognition not available on this device.');
      _setStatus(VoiceStatus.error);
      return false;
    }
    _setStatus(VoiceStatus.ready);
    return true;
  } catch (e) {
    _emitError(VoiceError.unknown, 'Failed to initialize: $e');
    _setStatus(VoiceStatus.error);
    return false;
  }
}
/// Start listening for speech
///
/// [localeId] overrides the current locale when provided. [continuous]
/// enables dictation mode with longer listen/pause windows and keeps
/// the session alive across pauses. Returns false when initialization
/// or the listen call fails.
Future<bool> startListening({
  String? localeId,
  bool continuous = false,
}) async {
  if (!_isInitialized) {
    final initialized = await initialize();
    if (!initialized) return false;
  }
  // Restart cleanly if a session is already running.
  if (_speechToText.isListening) {
    await stopListening();
  }

  _continuousMode = continuous;
  _currentLocale = localeId ?? _currentLocale;
  _setStatus(VoiceStatus.listening);

  // Dictation gets a long window with generous pauses; one-shot input
  // uses shorter limits.
  final listenWindow =
      continuous ? const Duration(minutes: 5) : const Duration(seconds: 30);
  final pauseWindow =
      continuous ? const Duration(seconds: 5) : const Duration(seconds: 3);

  try {
    await _speechToText.listen(
      onResult: _handleResult,
      onSoundLevelChange: _handleSoundLevel,
      localeId: _currentLocale,
      listenFor: listenWindow,
      pauseFor: pauseWindow,
      listenOptions: SpeechListenOptions(
        listenMode: continuous ? ListenMode.dictation : ListenMode.search,
        cancelOnError: !continuous,
        partialResults: true,
        autoPunctuation: true,
        enableHapticFeedback: true,
      ),
    );
    return true;
  } catch (e) {
    _emitError(VoiceError.recognitionFailed, 'Failed to start listening: $e');
    _setStatus(VoiceStatus.error);
    return false;
  }
}
/// Stop listening
///
/// Graceful stop: the engine may still deliver a final result. Status
/// returns to [VoiceStatus.ready].
Future<void> stopListening() async {
  await _speechToText.stop();
  _setStatus(VoiceStatus.ready);
}
/// Cancel listening (discards results)
///
/// Unlike [stopListening], no final result is delivered.
Future<void> cancelListening() async {
  await _speechToText.cancel();
  _setStatus(VoiceStatus.ready);
}
/// Toggle listening state
///
/// Returns the resulting listening state: false after stopping, or the
/// result of [startListening] when starting.
Future<bool> toggleListening({
  String? localeId,
  bool continuous = false,
}) async {
  if (!_speechToText.isListening) {
    return startListening(localeId: localeId, continuous: continuous);
  }
  await stopListening();
  return false;
}
/// Get available locales
@ -59,4 +219,131 @@ class VoiceService {
}
return await _speechToText.locales();
}
/// Set the locale used for subsequent listening sessions.
void setLocale(String localeId) => _currentLocale = localeId;
/// Handle speech recognition result
///
/// Publishes every transcript (partial and final) to [transcriptStream]
/// and [onResult]. On a final result: in continuous mode, schedules a
/// restart; otherwise shows a brief "processing" state before returning
/// to ready.
void _handleResult(SpeechRecognitionResult result) {
  final text = result.recognizedWords;
  final isFinal = result.finalResult;
  // Emit to stream
  _transcriptController.add(text);
  // Call callback
  onResult?.call(text, isFinal);
  // In continuous mode, restart listening after final result
  if (isFinal && _continuousMode && !_speechToText.isListening) {
    // Small delay before restarting
    Future.delayed(const Duration(milliseconds: 100), () {
      // Re-check: continuous mode may have been disabled meanwhile, or an
      // error may have occurred.
      if (_continuousMode && _status != VoiceStatus.error) {
        startListening(continuous: true);
      }
    });
  }
  if (isFinal && !_continuousMode) {
    _setStatus(VoiceStatus.processing);
    // Only flip back to ready if nothing else changed the status while
    // the delay was pending.
    Future.delayed(const Duration(milliseconds: 300), () {
      if (_status == VoiceStatus.processing) {
        _setStatus(VoiceStatus.ready);
      }
    });
  }
}
/// Handle sound level changes
///
/// Forwards a normalized level to [soundLevelStream] and [onSoundLevel]
/// for UI visualizations.
void _handleSoundLevel(double level) {
  // Normalize level to 0.0 - 1.0 range
  // speech_to_text returns dB levels, typically -160 to 0
  // NOTE(review): the raw level range differs per platform in
  // speech_to_text — confirm -160..0 holds on both Android and iOS.
  final normalizedLevel = ((level + 160) / 160).clamp(0.0, 1.0);
  _soundLevelController.add(normalizedLevel);
  onSoundLevel?.call(normalizedLevel);
}
/// Handle speech recognition errors
///
/// Maps the engine's string error codes to [VoiceError] values with
/// user-displayable messages. Timeouts in continuous mode are swallowed
/// and listening is simply restarted.
void _handleError(SpeechRecognitionError error) {
  final (voiceError, message) = switch (error.errorMsg) {
    'error_no_match' => (
        VoiceError.recognitionFailed,
        'No speech detected. Please try again.',
      ),
    'error_speech_timeout' => (
        VoiceError.timeout,
        'Speech timeout. Please try again.',
      ),
    'error_audio' => (
        VoiceError.noMicrophone,
        'Microphone error. Please check your microphone.',
      ),
    'error_permission' => (
        VoiceError.permissionDenied,
        'Microphone permission denied.',
      ),
    _ => (
        VoiceError.unknown,
        'Speech recognition error: ${error.errorMsg}',
      ),
  };
  // Don't emit error in continuous mode for timeouts
  if (_continuousMode && voiceError == VoiceError.timeout) {
    // Just restart listening
    startListening(continuous: true);
    return;
  }
  _emitError(voiceError, message);
  if (!_continuousMode) {
    _setStatus(VoiceStatus.ready);
  }
}
/// Handle speech recognition status changes
///
/// Translates the engine's string status callbacks into [VoiceStatus]
/// transitions. Unknown statuses are ignored.
void _handleStatus(String status) {
  if (status == 'listening') {
    _setStatus(VoiceStatus.listening);
  } else if (status == 'notListening') {
    // Only move to processing if we were actually listening.
    if (_status == VoiceStatus.listening) {
      _setStatus(VoiceStatus.processing);
    }
  } else if (status == 'done') {
    // Continuous mode restarts itself; don't reset to ready.
    if (!_continuousMode) {
      _setStatus(VoiceStatus.ready);
    }
  }
}
/// Set status and notify listeners
///
/// No-op when the status is unchanged, so listeners only see real
/// transitions.
void _setStatus(VoiceStatus newStatus) {
  if (_status == newStatus) return;
  _status = newStatus;
  _statusController.add(newStatus);
  onStatusChange?.call(newStatus);
}
/// Emit error to callback
///
/// Errors are delivered only through [onError]; they are not pushed
/// onto the status/transcript streams.
void _emitError(VoiceError error, String message) {
  onError?.call(error, message);
}
/// Dispose resources
///
/// Disables continuous mode and clears callbacks BEFORE closing the
/// controllers: the delayed restart scheduled in _handleResult would
/// otherwise call startListening after disposal and add events to the
/// closed stream controllers (StateError).
void dispose() {
  _continuousMode = false;
  onResult = null;
  onStatusChange = null;
  onError = null;
  onSoundLevel = null;
  // Best-effort stop; dispose cannot await. NOTE(review): stop() may still
  // trigger a late status callback — confirm no events race the closes below.
  _speechToText.stop();
  _statusController.close();
  _transcriptController.close();
  _soundLevelController.close();
}
}

View File

@ -6,11 +6,13 @@ import FlutterMacOS
import Foundation
import local_auth_darwin
import shared_preferences_foundation
import speech_to_text
import webview_flutter_wkwebview
// Registers each Flutter plugin used by the macOS target with the engine.
// NOTE(review): this registrant appears to be tool-generated — hand edits
// are typically overwritten on the next `flutter pub get`/build.
func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {
  FLALocalAuthPlugin.register(with: registry.registrar(forPlugin: "FLALocalAuthPlugin"))
  SharedPreferencesPlugin.register(with: registry.registrar(forPlugin: "SharedPreferencesPlugin"))
  SpeechToTextPlugin.register(with: registry.registrar(forPlugin: "SpeechToTextPlugin"))
  WebViewFlutterPlugin.register(with: registry.registrar(forPlugin: "WebViewFlutterPlugin"))
}

View File

@ -105,6 +105,22 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.3.1"
ffi:
dependency: transitive
description:
name: ffi
sha256: "16ed7b077ef01ad6170a3d0c57caa4a112a38d7a2ed5602e0aca9ca6f3d98da6"
url: "https://pub.dev"
source: hosted
version: "2.1.3"
file:
dependency: transitive
description:
name: file
sha256: a3b4f84adafef897088c160faf7dfffb7696046cb13ae90b508c2cbc95d3b8d4
url: "https://pub.dev"
source: hosted
version: "7.0.1"
flutter:
dependency: "direct main"
description: flutter
@ -272,6 +288,30 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.9.0"
path_provider_linux:
dependency: transitive
description:
name: path_provider_linux
sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279
url: "https://pub.dev"
source: hosted
version: "2.2.1"
path_provider_platform_interface:
dependency: transitive
description:
name: path_provider_platform_interface
sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334"
url: "https://pub.dev"
source: hosted
version: "2.1.2"
path_provider_windows:
dependency: transitive
description:
name: path_provider_windows
sha256: bd6f00dbd873bfb70d0761682da2b3a2c2fccc2b9e84c495821639601d81afe7
url: "https://pub.dev"
source: hosted
version: "2.3.0"
pedantic:
dependency: transitive
description:
@ -328,6 +368,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "0.2.1"
platform:
dependency: transitive
description:
name: platform
sha256: "5d6b1b0036a5f331ebc77c850ebc8506cbc1e9416c27e59b439f917a902a4984"
url: "https://pub.dev"
source: hosted
version: "3.1.6"
plugin_platform_interface:
dependency: transitive
description:
@ -336,6 +384,62 @@ packages:
url: "https://pub.dev"
source: hosted
version: "2.1.8"
shared_preferences:
dependency: "direct main"
description:
name: shared_preferences
sha256: "6e8bf70b7fef813df4e9a36f658ac46d107db4b4cfe1048b477d4e453a8159f5"
url: "https://pub.dev"
source: hosted
version: "2.5.3"
shared_preferences_android:
dependency: transitive
description:
name: shared_preferences_android
sha256: "9f9f3d372d4304723e6136663bb291c0b93f5e4c8a4a6314347f481a33bda2b1"
url: "https://pub.dev"
source: hosted
version: "2.4.7"
shared_preferences_foundation:
dependency: transitive
description:
name: shared_preferences_foundation
sha256: "6a52cfcdaeac77cad8c97b539ff688ccfc458c007b4db12be584fbe5c0e49e03"
url: "https://pub.dev"
source: hosted
version: "2.5.4"
shared_preferences_linux:
dependency: transitive
description:
name: shared_preferences_linux
sha256: "580abfd40f415611503cae30adf626e6656dfb2f0cee8f465ece7b6defb40f2f"
url: "https://pub.dev"
source: hosted
version: "2.4.1"
shared_preferences_platform_interface:
dependency: transitive
description:
name: shared_preferences_platform_interface
sha256: "57cbf196c486bc2cf1f02b85784932c6094376284b3ad5779d1b1c6c6a816b80"
url: "https://pub.dev"
source: hosted
version: "2.4.1"
shared_preferences_web:
dependency: transitive
description:
name: shared_preferences_web
sha256: c49bd060261c9a3f0ff445892695d6212ff603ef3115edbb448509d407600019
url: "https://pub.dev"
source: hosted
version: "2.4.3"
shared_preferences_windows:
dependency: transitive
description:
name: shared_preferences_windows
sha256: "94ef0f72b2d71bc3e700e025db3710911bd51a71cefb65cc609dd0d9a982e3c1"
url: "https://pub.dev"
source: hosted
version: "2.4.1"
sky_engine:
dependency: transitive
description: flutter
@ -477,6 +581,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "3.22.0"
xdg_directories:
dependency: transitive
description:
name: xdg_directories
sha256: "7a3f37b05d989967cdddcbb571f1ea834867ae2faa29725fd085180e0883aa15"
url: "https://pub.dev"
source: hosted
version: "1.1.0"
sdks:
dart: ">=3.5.4 <4.0.0"
flutter: ">=3.24.0"

View File

@ -30,6 +30,9 @@ dependencies:
# Permissions
permission_handler: ^11.3.1
# Storage
shared_preferences: ^2.3.3
dev_dependencies:
flutter_test: