feat(chat): use app language setting for voice-to-text STT language

Reads settingsProvider.language (BCP-47 code) and passes it to the
Whisper transcribe call instead of hardcoding 'zh'.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
hailin 2026-03-06 08:11:21 -08:00
parent 0aac693b5d
commit 23675fa5a5
2 changed files with 5 additions and 3 deletions

View File

@@ -13,6 +13,7 @@ import '../widgets/approval_action_card.dart';
import '../widgets/conversation_drawer.dart';
import '../../../agent_call/presentation/pages/agent_call_page.dart';
import '../widgets/voice_mic_button.dart';
import '../../../settings/presentation/providers/settings_providers.dart';
// ---------------------------------------------------------------------------
// Chat page Timeline workflow style (inspired by Claude Code VSCode)
@@ -61,7 +62,8 @@ class _ChatPageState extends ConsumerState<ChatPage> {
_messageController.text = '识别中…';
});
try {
final text = await ref.read(chatProvider.notifier).transcribeAudio(audioPath);
final language = ref.read(settingsProvider).language;
final text = await ref.read(chatProvider.notifier).transcribeAudio(audioPath, language: language);
if (mounted) {
setState(() {
_messageController.text = text;

View File

@@ -573,9 +573,9 @@ class ChatNotifier extends StateNotifier<ChatState> {
}
}
Future<String> transcribeAudio(String audioPath) async {
Future<String> transcribeAudio(String audioPath, {String language = 'zh'}) async {
final datasource = _ref.read(chatRemoteDatasourceProvider);
return datasource.transcribeAudio(audioPath: audioPath);
return datasource.transcribeAudio(audioPath: audioPath, language: language);
}
Future<void> cancelCurrentTask() async {