feat: add Markdown rendering and phone-call voice entry to chat UI

Chat message rendering:
- MessageBubble: replace plain SelectableText with MarkdownBody for
  assistant messages, with full dark-theme stylesheet (headers, code
  blocks, tables, blockquotes, list bullets)
- StreamTextWidget: render completed messages as MarkdownBody, keep
  plain-text + blinking cursor for actively streaming messages

Voice interaction redesign:
- Remove all long-press-to-record code (~100 lines): AudioRecorder,
  SpeechEnhancer, mic pulse animation, voice indicator bar,
  SingleTickerProviderStateMixin
- Add phone-call button in AppBar (Icons.call) that navigates to the
  existing AgentCallPage for full-duplex voice conversation
- Add prominent "语音通话" entry button on empty chat state
- AgentCallPage was already fully implemented (ringing → connecting →
  active → ended, dual-direction WebSocket audio, GTCRN denoise,
  Kokoro TTS playback, waveform visualization) but previously unused

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
hailin 2026-02-23 07:31:40 -08:00
parent bed17f32f9
commit dfc541b571
3 changed files with 102 additions and 188 deletions

View File

@@ -1,13 +1,5 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:dio/dio.dart' show FormData, MultipartFile;
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart'; import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:record/record.dart';
import '../../../../core/audio/noise_reducer.dart';
import '../../../../core/audio/speech_enhancer.dart';
import '../../../../core/config/api_endpoints.dart';
import '../../../../core/network/dio_client.dart';
import '../../../../core/theme/app_colors.dart'; import '../../../../core/theme/app_colors.dart';
import '../../domain/entities/chat_message.dart'; import '../../domain/entities/chat_message.dart';
import '../providers/chat_providers.dart'; import '../providers/chat_providers.dart';
@ -16,6 +8,7 @@ import '../widgets/tool_execution_card.dart';
import '../widgets/approval_action_card.dart'; import '../widgets/approval_action_card.dart';
import '../widgets/agent_thinking_indicator.dart'; import '../widgets/agent_thinking_indicator.dart';
import '../widgets/stream_text_widget.dart'; import '../widgets/stream_text_widget.dart';
import '../../../agent_call/presentation/pages/agent_call_page.dart';
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Chat page ConsumerStatefulWidget // Chat page ConsumerStatefulWidget
@ -28,108 +21,10 @@ class ChatPage extends ConsumerStatefulWidget {
ConsumerState<ChatPage> createState() => _ChatPageState(); ConsumerState<ChatPage> createState() => _ChatPageState();
} }
class _ChatPageState extends ConsumerState<ChatPage> with SingleTickerProviderStateMixin { class _ChatPageState extends ConsumerState<ChatPage> {
final _messageController = TextEditingController(); final _messageController = TextEditingController();
final _scrollController = ScrollController(); final _scrollController = ScrollController();
// -- Voice input (record + GTCRN denoise + backend STT) -------------------
late final AudioRecorder _recorder;
final SpeechEnhancer _enhancer = SpeechEnhancer();
bool _isListening = false;
bool _isTranscribing = false;
List<List<int>> _audioChunks = [];
StreamSubscription<List<int>>? _audioSubscription;
late AnimationController _micPulseController;
@override
void initState() {
super.initState();
_recorder = AudioRecorder();
_enhancer.init(); // load GTCRN model in background
_micPulseController = AnimationController(
vsync: this,
duration: const Duration(milliseconds: 800),
);
}
/// Starts streaming microphone audio for voice input.
///
/// Requests mic permission, begins a 16 kHz mono PCM16 stream with the
/// platform's noise suppression and auto-gain enabled, and buffers every
/// incoming chunk into [_audioChunks] until [_stopListening] is called.
Future<void> _startListening() async {
// Bail out if the user denied mic access, or if the widget was disposed
// while we were awaiting the permission check.
final hasPermission = await _recorder.hasPermission();
if (!hasPermission || !mounted) return;
setState(() => _isListening = true);
// Pulse the mic icon while recording (reverse: true gives a breathing effect).
_micPulseController.repeat(reverse: true);
// Discard any audio left over from a previous recording session.
_audioChunks = [];
// Stream raw PCM 16kHz mono with platform noise suppression + AGC
final stream = await _recorder.startStream(const RecordConfig(
encoder: AudioEncoder.pcm16bits,
sampleRate: 16000,
numChannels: 1,
noiseSuppress: true,
autoGain: true,
));
// Accumulate raw chunks; they are concatenated and transcribed on stop.
_audioSubscription = stream.listen((data) {
_audioChunks.add(data);
});
}
/// Stops an active recording and, when [autoSubmit] is true, transcribes it.
///
/// The buffered PCM chunks are merged, denoised with the GTCRN model,
/// silence-trimmed, and POSTed to the backend transcription endpoint; a
/// non-empty result is placed into the message field and sent via [_send].
/// Transcription failures are swallowed so the user can still type.
Future<void> _stopListening({bool autoSubmit = false}) async {
// No-op when called while not recording (e.g. a stray cancel tap).
if (!_isListening) return;
// Stop recording and stream
await _recorder.stop();
await _audioSubscription?.cancel();
_audioSubscription = null;
_micPulseController.stop();
_micPulseController.reset();
// The widget may have been disposed while awaiting stop/cancel above.
if (!mounted) return;
setState(() => _isListening = false);
// Cancel path (autoSubmit == false) or nothing captured: discard silently.
if (!autoSubmit || _audioChunks.isEmpty) return;
// Transcribe via backend
setState(() => _isTranscribing = true);
try {
// Combine recorded chunks into a single PCM buffer
final allBytes = _audioChunks.expand((c) => c).toList();
final pcmData = Uint8List.fromList(allBytes);
// Release the buffered audio immediately; it is no longer needed.
_audioChunks = [];
// GTCRN ML denoise (light) + trim leading/trailing silence
final denoised = _enhancer.enhance(pcmData);
final trimmed = NoiseReducer.trimSilence(denoised);
// An all-silence recording leaves nothing worth transcribing.
if (trimmed.isEmpty) {
if (mounted) setState(() => _isTranscribing = false);
return;
}
// POST to backend /voice/transcribe
final dio = ref.read(dioClientProvider);
final formData = FormData.fromMap({
'audio': MultipartFile.fromBytes(trimmed, filename: 'audio.pcm'),
});
final response = await dio.post(
ApiEndpoints.transcribe,
data: formData,
);
// Missing or null 'text' in the response is treated as an empty result.
final text =
(response.data as Map<String, dynamic>)['text'] as String? ?? '';
if (text.isNotEmpty && mounted) {
_messageController.text = text;
_send();
}
} catch (_) {
// Voice input failed silently; the user can still type the message.
} finally {
if (mounted) setState(() => _isTranscribing = false);
}
}
// -- Send ------------------------------------------------------------------ // -- Send ------------------------------------------------------------------
void _send() { void _send() {
@ -152,6 +47,12 @@ class _ChatPageState extends ConsumerState<ChatPage> with SingleTickerProviderSt
}); });
} }
/// Navigates to the full-duplex voice-call screen ([AgentCallPage]).
void _openVoiceCall() {
final route = MaterialPageRoute(builder: (_) => const AgentCallPage());
Navigator.of(context).push(route);
}
// -- Message widget dispatch ----------------------------------------------- // -- Message widget dispatch -----------------------------------------------
Widget _buildMessageWidget(ChatMessage message, ChatState chatState) { Widget _buildMessageWidget(ChatMessage message, ChatState chatState) {
@ -295,31 +196,11 @@ class _ChatPageState extends ConsumerState<ChatPage> with SingleTickerProviderSt
tooltip: '清空对话', tooltip: '清空对话',
onPressed: () => ref.read(chatProvider.notifier).clearChat(), onPressed: () => ref.read(chatProvider.notifier).clearChat(),
), ),
// Voice input button // Voice call button
GestureDetector( IconButton(
onLongPressStart: (_) => _startListening(), icon: const Icon(Icons.call),
onLongPressEnd: (_) => _stopListening(autoSubmit: true), tooltip: '语音通话',
child: AnimatedBuilder( onPressed: _openVoiceCall,
animation: _micPulseController,
builder: (context, child) {
return IconButton(
icon: Icon(
_isListening ? Icons.mic : Icons.mic_none,
color: _isListening
? Color.lerp(AppColors.error, AppColors.warning, _micPulseController.value)
: null,
size: _isListening ? 28 + (_micPulseController.value * 4) : 24,
),
onPressed: () {
if (_isListening) {
_stopListening(autoSubmit: true);
} else {
_startListening();
}
},
);
},
),
), ),
], ],
), ),
@ -358,9 +239,20 @@ class _ChatPageState extends ConsumerState<ChatPage> with SingleTickerProviderSt
), ),
const SizedBox(height: 8), const SizedBox(height: 8),
Text( Text(
'长按麦克风按钮进行语音输入', '输入文字或拨打语音通话',
style: TextStyle(color: AppColors.textMuted, fontSize: 13), style: TextStyle(color: AppColors.textMuted, fontSize: 13),
), ),
const SizedBox(height: 24),
OutlinedButton.icon(
onPressed: _openVoiceCall,
icon: const Icon(Icons.call),
label: const Text('语音通话'),
style: OutlinedButton.styleFrom(
foregroundColor: AppColors.success,
side: const BorderSide(color: AppColors.success),
padding: const EdgeInsets.symmetric(horizontal: 24, vertical: 12),
),
),
], ],
), ),
) )
@ -382,46 +274,6 @@ class _ChatPageState extends ConsumerState<ChatPage> with SingleTickerProviderSt
), ),
), ),
// Voice listening / transcribing indicator
if (_isListening || _isTranscribing)
Container(
padding: const EdgeInsets.symmetric(vertical: 8, horizontal: 16),
color: (_isListening ? AppColors.error : AppColors.primary)
.withOpacity(0.1),
child: Row(
children: [
if (_isListening)
AnimatedBuilder(
animation: _micPulseController,
builder: (context, _) => Icon(
Icons.mic,
color: AppColors.error,
size: 20 + (_micPulseController.value * 4),
),
)
else
const SizedBox(
width: 20,
height: 20,
child: CircularProgressIndicator(strokeWidth: 2),
),
const SizedBox(width: 8),
Text(
_isListening ? '正在聆听...' : '正在转写...',
style: TextStyle(
color: _isListening ? AppColors.error : AppColors.primary,
),
),
const Spacer(),
if (_isListening)
TextButton(
onPressed: () => _stopListening(),
child: const Text('取消'),
),
],
),
),
// Input row // Input row
_buildInputArea(chatState), _buildInputArea(chatState),
], ],
@ -491,10 +343,6 @@ class _ChatPageState extends ConsumerState<ChatPage> with SingleTickerProviderSt
void dispose() { void dispose() {
_messageController.dispose(); _messageController.dispose();
_scrollController.dispose(); _scrollController.dispose();
_micPulseController.dispose();
_audioSubscription?.cancel();
_recorder.dispose();
_enhancer.dispose();
super.dispose(); super.dispose();
} }
} }

View File

@@ -1,9 +1,10 @@
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:flutter_markdown/flutter_markdown.dart';
import '../../../../core/theme/app_colors.dart'; import '../../../../core/theme/app_colors.dart';
import '../../domain/entities/chat_message.dart'; import '../../domain/entities/chat_message.dart';
/// Renders a single chat message bubble with appropriate styling /// Renders a single chat message bubble with appropriate styling
/// for user and assistant messages. /// for user and assistant messages. Assistant messages render Markdown.
class MessageBubble extends StatelessWidget { class MessageBubble extends StatelessWidget {
final ChatMessage message; final ChatMessage message;
@ -47,13 +48,20 @@ class MessageBubble extends StatelessWidget {
), ),
), ),
// Message content // Message content Markdown for assistant, plain text for user
if (_isUser)
SelectableText( SelectableText(
message.content, message.content,
style: TextStyle( style: const TextStyle(
color: _isUser ? Colors.white : AppColors.textPrimary, color: Colors.white,
fontSize: 15, fontSize: 15,
), ),
)
else
MarkdownBody(
data: message.content,
selectable: true,
styleSheet: _markdownStyleSheet(context),
), ),
// Timestamp // Timestamp
@ -75,6 +83,37 @@ class MessageBubble extends StatelessWidget {
); );
} }
/// Builds the dark-theme Markdown stylesheet for assistant message bubbles.
///
/// Covers paragraphs, three heading levels, bold/italic emphasis, inline
/// code, fenced code blocks, blockquotes, tables, and list bullets so the
/// rendered Markdown matches the app's [AppColors] palette.
///
/// NOTE(review): [context] is currently unused — presumably reserved for
/// theme-derived styling; confirm before removing the parameter.
MarkdownStyleSheet _markdownStyleSheet(BuildContext context) {
return MarkdownStyleSheet(
p: const TextStyle(color: AppColors.textPrimary, fontSize: 15),
h1: const TextStyle(color: AppColors.textPrimary, fontSize: 22, fontWeight: FontWeight.bold),
h2: const TextStyle(color: AppColors.textPrimary, fontSize: 19, fontWeight: FontWeight.bold),
h3: const TextStyle(color: AppColors.textPrimary, fontSize: 17, fontWeight: FontWeight.w600),
strong: const TextStyle(color: AppColors.textPrimary, fontWeight: FontWeight.bold),
em: const TextStyle(color: AppColors.textSecondary, fontStyle: FontStyle.italic),
// Inline code: accent color on a translucent dark chip.
code: TextStyle(
color: AppColors.secondary,
backgroundColor: AppColors.background.withOpacity(0.5),
fontSize: 13,
fontFamily: 'monospace',
),
// Fenced code blocks: rounded translucent panel.
codeblockDecoration: BoxDecoration(
color: AppColors.background.withOpacity(0.6),
borderRadius: BorderRadius.circular(8),
),
codeblockPadding: const EdgeInsets.all(10),
// Blockquotes: left accent bar instead of a background fill.
blockquoteDecoration: BoxDecoration(
border: Border(left: BorderSide(color: AppColors.primary, width: 3)),
),
blockquotePadding: const EdgeInsets.only(left: 12, top: 4, bottom: 4),
// Tables: hairline borders with compact cell padding.
tableBorder: TableBorder.all(color: AppColors.surfaceLight, width: 0.5),
tableHead: const TextStyle(color: AppColors.textPrimary, fontWeight: FontWeight.bold, fontSize: 13),
tableBody: const TextStyle(color: AppColors.textSecondary, fontSize: 13),
tableCellsPadding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4),
listBullet: const TextStyle(color: AppColors.textSecondary, fontSize: 15),
);
}
String _formatTime(DateTime time) { String _formatTime(DateTime time) {
final hour = time.hour.toString().padLeft(2, '0'); final hour = time.hour.toString().padLeft(2, '0');
final minute = time.minute.toString().padLeft(2, '0'); final minute = time.minute.toString().padLeft(2, '0');

View File

@@ -1,8 +1,10 @@
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:flutter_markdown/flutter_markdown.dart';
import '../../../../core/theme/app_colors.dart'; import '../../../../core/theme/app_colors.dart';
/// Widget that renders streaming text with an animated cursor at the end, /// Widget that renders streaming text with an animated cursor at the end,
/// giving the appearance of real-time text generation. /// giving the appearance of real-time text generation.
/// When streaming completes, renders Markdown.
class StreamTextWidget extends StatefulWidget { class StreamTextWidget extends StatefulWidget {
final String text; final String text;
final bool isStreaming; final bool isStreaming;
@ -46,13 +48,38 @@ class _StreamTextWidgetState extends State<StreamTextWidget>
); );
final effectiveStyle = widget.style ?? defaultStyle; final effectiveStyle = widget.style ?? defaultStyle;
// Streaming complete render full Markdown
if (!widget.isStreaming) { if (!widget.isStreaming) {
return SelectableText( return MarkdownBody(
widget.text, data: widget.text,
style: effectiveStyle, selectable: true,
styleSheet: MarkdownStyleSheet(
p: effectiveStyle,
h1: effectiveStyle.copyWith(fontSize: 22, fontWeight: FontWeight.bold),
h2: effectiveStyle.copyWith(fontSize: 19, fontWeight: FontWeight.bold),
h3: effectiveStyle.copyWith(fontSize: 17, fontWeight: FontWeight.w600),
strong: effectiveStyle.copyWith(fontWeight: FontWeight.bold),
em: effectiveStyle.copyWith(fontStyle: FontStyle.italic, color: AppColors.textSecondary),
code: TextStyle(
color: AppColors.secondary,
backgroundColor: AppColors.background.withOpacity(0.5),
fontSize: 13,
fontFamily: 'monospace',
),
codeblockDecoration: BoxDecoration(
color: AppColors.background.withOpacity(0.6),
borderRadius: BorderRadius.circular(8),
),
codeblockPadding: const EdgeInsets.all(10),
tableBorder: TableBorder.all(color: AppColors.surfaceLight, width: 0.5),
tableHead: effectiveStyle.copyWith(fontWeight: FontWeight.bold, fontSize: 13),
tableBody: TextStyle(color: AppColors.textSecondary, fontSize: 13),
tableCellsPadding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4),
),
); );
} }
// Still streaming show plain text with blinking cursor
return AnimatedBuilder( return AnimatedBuilder(
animation: _cursorController, animation: _cursorController,
builder: (context, _) { builder: (context, _) {