// it0/it0_app/lib/features/chat/presentation/providers/voice_providers.dart
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:speech_to_text/speech_to_text.dart' as stt;
// ---------------------------------------------------------------------------
// Voice input state
// ---------------------------------------------------------------------------
/// Lifecycle phases of the speech-to-text workflow.
enum VoiceInputStatus { idle, initializing, listening, processing, error }

/// Immutable snapshot of the voice-input feature's state.
class VoiceInputState {
  /// Current phase of the recognition lifecycle.
  final VoiceInputStatus status;

  /// Transcript accumulated so far in the active session.
  final String recognizedText;

  /// Human-readable failure description, or null when no error is pending.
  final String? error;

  /// Whether the device's speech engine initialized successfully.
  final bool isAvailable;

  const VoiceInputState({
    this.status = VoiceInputStatus.idle,
    this.recognizedText = '',
    this.error,
    this.isAvailable = false,
  });

  /// Whether a recognition session is currently active.
  bool get isListening => status == VoiceInputStatus.listening;

  /// Returns a copy with the given fields replaced.
  ///
  /// NOTE: [error] is deliberately NOT carried over from `this` — omitting it
  /// clears any previous error, so stale messages never survive a state
  /// transition. Callers that want to keep an error must pass it explicitly.
  VoiceInputState copyWith({
    VoiceInputStatus? status,
    String? recognizedText,
    String? error,
    bool? isAvailable,
  }) =>
      VoiceInputState(
        status: status ?? this.status,
        recognizedText: recognizedText ?? this.recognizedText,
        error: error,
        isAvailable: isAvailable ?? this.isAvailable,
      );
}
// ---------------------------------------------------------------------------
// Voice input notifier
// ---------------------------------------------------------------------------
/// Drives the speech-to-text lifecycle and exposes it as [VoiceInputState].
///
/// Initializes the underlying [stt.SpeechToText] engine on construction, then
/// handles starting, stopping, and cancelling listening sessions. All state
/// transitions flow through [VoiceInputState.copyWith], which clears any
/// pending error on every transition.
class VoiceInputNotifier extends StateNotifier<VoiceInputState> {
  final stt.SpeechToText _speech;

  VoiceInputNotifier()
      : _speech = stt.SpeechToText(),
        super(const VoiceInputState()) {
    _initialize();
  }

  /// Initializes the platform speech engine and records availability.
  ///
  /// On failure the state moves to [VoiceInputStatus.error] with a message
  /// and [VoiceInputState.isAvailable] stays false, which disables
  /// [startListening].
  Future<void> _initialize() async {
    state = state.copyWith(status: VoiceInputStatus.initializing);
    try {
      final available = await _speech.initialize(
        onStatus: _onStatus,
        onError: (_) => _onError(),
      );
      state = state.copyWith(
        status: VoiceInputStatus.idle,
        isAvailable: available,
      );
    } catch (e) {
      // Broad catch: the plugin can surface platform exceptions (e.g.
      // missing microphone permission) whose concrete types vary by OS.
      state = state.copyWith(
        status: VoiceInputStatus.error,
        error: 'Speech recognition unavailable: $e',
        isAvailable: false,
      );
    }
  }

  /// Starts listening for speech input.
  ///
  /// No-op when the engine is unavailable OR a session is already active:
  /// the re-entrancy guard prevents a second call (e.g. a double tap) from
  /// re-entering [stt.SpeechToText.listen] and wiping the in-progress
  /// transcript.
  void startListening() {
    if (!state.isAvailable || state.isListening) return;
    state = state.copyWith(
      status: VoiceInputStatus.listening,
      recognizedText: '',
      error: null,
    );
    _speech.listen(
      onResult: (result) {
        state = state.copyWith(recognizedText: result.recognizedWords);
        if (result.finalResult) {
          state = state.copyWith(status: VoiceInputStatus.processing);
        }
      },
      // Hard cap on a single session; auto-stop after a silence gap.
      listenFor: const Duration(seconds: 30),
      pauseFor: const Duration(seconds: 3),
    );
  }

  /// Stops listening and returns the text recognized so far.
  ///
  /// The transcript is intentionally kept in state (only the status resets
  /// to idle) so callers can still read it after stopping.
  String stopListening() {
    _speech.stop();
    final text = state.recognizedText;
    state = state.copyWith(status: VoiceInputStatus.idle);
    return text;
  }

  /// Cancels the current listening session and discards the transcript.
  void cancelListening() {
    _speech.cancel();
    state = state.copyWith(
      status: VoiceInputStatus.idle,
      recognizedText: '',
    );
  }

  /// Handles engine status callbacks.
  ///
  /// 'done' / 'notListening' are status strings emitted by the
  /// speech_to_text plugin when a session ends; move to processing if we
  /// captured any text, otherwise back to idle.
  void _onStatus(String status) {
    if (status == 'done' || status == 'notListening') {
      if (state.recognizedText.isNotEmpty) {
        state = state.copyWith(status: VoiceInputStatus.processing);
      } else {
        state = state.copyWith(status: VoiceInputStatus.idle);
      }
    }
  }

  /// Handles engine error callbacks by surfacing a generic error state.
  void _onError() {
    state = state.copyWith(
      status: VoiceInputStatus.error,
      error: 'Speech recognition error',
    );
  }

  @override
  void dispose() {
    // Release the microphone before tearing down the notifier.
    _speech.stop();
    super.dispose();
  }
}
// ---------------------------------------------------------------------------
// Providers
// ---------------------------------------------------------------------------
/// Exposes the [VoiceInputNotifier] and its [VoiceInputState] to the app.
final voiceInputProvider =
StateNotifierProvider<VoiceInputNotifier, VoiceInputState>((ref) {
return VoiceInputNotifier();
});
/// Whether a listening session is currently active.
///
/// NOTE(review): this watches the whole [voiceInputProvider] state, so
/// dependents rebuild on every state change (e.g. each transcript update),
/// not only when the listening flag flips — consider `.select` if that
/// becomes a performance concern.
final isListeningProvider = Provider<bool>((ref) {
return ref.watch(voiceInputProvider).isListening;
});
/// Whether the device's speech engine initialized successfully.
final voiceAvailableProvider = Provider<bool>((ref) {
return ref.watch(voiceInputProvider).isAvailable;
});