fix(stt): always use Whisper auto language detection, remove app language hint

Whisper detects the language from the audio content — a user who speaks Chinese
gets Chinese, one who speaks English gets English. The app language setting is
irrelevant to STT.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
hailin 2026-03-07 00:03:58 -08:00
parent 4c7c05eb37
commit ecc64e0ff9
3 changed files with 6 additions and 12 deletions

View File

@ -153,20 +153,16 @@ class ChatRemoteDatasource {
/// Uploads an audio file to the voice-message endpoint.
/// Backend performs STT, interrupts any running task if needed, and
/// Transcribe audio to text (STT only, does NOT send to agent).
/// Returns the transcript string.
/// Pass language='auto' to let Whisper auto-detect (best for mixed-language).
/// No language hint is sent — Whisper auto-detects from audio content.
Future<String> transcribeAudio({
required String audioPath,
String language = 'zh',
}) async {
final fields = <String, dynamic>{
final formData = FormData.fromMap({
'audio': await MultipartFile.fromFile(
audioPath,
filename: audioPath.split('/').last,
),
};
if (language != 'auto') fields['language'] = language;
final formData = FormData.fromMap(fields);
});
final response = await _dio.post(
'${ApiEndpoints.agent}/transcribe',
data: formData,

View File

@ -13,7 +13,6 @@ import '../widgets/approval_action_card.dart';
import '../widgets/conversation_drawer.dart';
import '../../../agent_call/presentation/pages/agent_call_page.dart';
import '../widgets/voice_mic_button.dart';
import '../../../settings/presentation/providers/settings_providers.dart';
// ---------------------------------------------------------------------------
// Chat page Timeline workflow style (inspired by Claude Code VSCode)
@ -62,8 +61,7 @@ class _ChatPageState extends ConsumerState<ChatPage> {
_messageController.text = '识别中…';
});
try {
final language = ref.read(settingsProvider).language;
final text = await ref.read(chatProvider.notifier).transcribeAudio(audioPath, language: language);
final text = await ref.read(chatProvider.notifier).transcribeAudio(audioPath);
if (mounted) {
setState(() {
_messageController.text = text;

View File

@ -573,9 +573,9 @@ class ChatNotifier extends StateNotifier<ChatState> {
}
}
Future<String> transcribeAudio(String audioPath, {String language = 'zh'}) async {
Future<String> transcribeAudio(String audioPath) async {
final datasource = _ref.read(chatRemoteDatasourceProvider);
return datasource.transcribeAudio(audioPath: audioPath, language: language);
return datasource.transcribeAudio(audioPath: audioPath);
}
Future<void> cancelCurrentTask() async {