Compare commits

..

No commits in common. "508244fac38f51e1266c7dd6b86537acb8a3c5e8" and "aba8b44ab8fcf3bd97f584b1c5ecd60990a4ab5b" have entirely different histories.

3 changed files with 112 additions and 17 deletions

View File

@ -40,10 +40,14 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
// True while a recognition session is active.
bool _isListening = false;
// Text recognized during the in-progress session; pushed into
// _recognitionHistory when listening stops, then cleared.
String _currentText = '';
// Latest error text, if any (where it is surfaced is not visible in this diff).
String _errorMessage = '';
/// Finished recognition results, newest first (capped at 10 entries
/// at the insertion site).
List<String> _recognitionHistory = [];
/// Snapshot of the text field taken when listening starts/stops; appears
/// to act as the base that new recognition text is appended to —
/// NOTE(review): confirm against _textController usage elsewhere.
String _baseText = '';
// Streaming partial results for the utterance in progress; cleared on
// each new session and by _clearHistory.
final List<String> _realtimeResults = [];
@override
void initState() {
super.initState();
@ -148,9 +152,11 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
);
}
///
void _clearContent() {
///
void _clearHistory() {
setState(() {
_recognitionHistory.clear();
_realtimeResults.clear();
_currentText = '';
_baseText = ''; //
_textController.clear();
@ -167,8 +173,8 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
actions: [
IconButton(
icon: const Icon(Icons.clear_all),
onPressed: _clearContent,
tooltip: '清除内容',
onPressed: _clearHistory,
tooltip: '清除历史',
),
],
),
@ -183,6 +189,9 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
//
_buildRecognitionCard(),
const SizedBox(height: 16),
//
Expanded(child: _buildHistoryCard()),
],
),
),
@ -387,6 +396,66 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
);
}
/// Builds the "recognition history" card.
///
/// Shows a grey italic placeholder when [_recognitionHistory] is empty;
/// otherwise a numbered list of past results, each with a copy button.
Widget _buildHistoryCard() {
  // One list entry: numbered avatar, the recognized text, and a copy action.
  Widget buildEntry(BuildContext context, int index) {
    final entryText = _recognitionHistory[index];
    return Card(
      margin: const EdgeInsets.only(bottom: 8.0),
      child: ListTile(
        leading: CircleAvatar(
          backgroundColor: Theme.of(context).primaryColor,
          child: Text(
            '${index + 1}',
            style: const TextStyle(color: Colors.white),
          ),
        ),
        title: Text(entryText),
        trailing: IconButton(
          icon: const Icon(Icons.copy),
          onPressed: () {
            // TODO: nothing is written to the clipboard yet — the SnackBar
            // below is shown unconditionally. Wire up Clipboard.setData
            // (package:flutter/services.dart) before relying on this.
            ScaffoldMessenger.of(context).showSnackBar(
              const SnackBar(
                content: Text('已复制到剪贴板'),
              ),
            );
          },
        ),
      ),
    );
  }

  final Widget historyBody = _recognitionHistory.isEmpty
      ? Center(
          child: Text(
            '暂无识别历史',
            style: TextStyle(
              color: Colors.grey[600],
              fontStyle: FontStyle.italic,
            ),
          ),
        )
      : ListView.builder(
          itemCount: _recognitionHistory.length,
          itemBuilder: buildEntry,
        );

  return Card(
    child: Padding(
      padding: const EdgeInsets.all(16.0),
      child: Column(
        crossAxisAlignment: CrossAxisAlignment.start,
        children: [
          const Text(
            '识别历史',
            style: TextStyle(fontSize: 18, fontWeight: FontWeight.bold),
          ),
          const SizedBox(height: 16),
          Expanded(child: historyBody),
        ],
      ),
    ),
  );
}
///
Widget _buildFloatingActionButton() {
if (!_isInitialized) {
@ -439,6 +508,16 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
});
if (!isListening) {
//
if (_currentText.isNotEmpty) {
setState(() {
_recognitionHistory.insert(0, _currentText);
//
if (_recognitionHistory.length > 10) {
_recognitionHistory.removeLast();
}
});
}
//
setState(() {
_baseText = _textController.text; //
@ -448,6 +527,7 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
//
setState(() {
_baseText = _textController.text; //
_realtimeResults.clear();
_currentText = ''; //
});
}

View File

@ -3,14 +3,24 @@ class SpeechRecognitionResult {
/// The text produced by the recognizer for this result.
final String recognizedWords;
/// Recognition confidence in the range 0.0 to 1.0; defaults to 0.0 when
/// the platform does not supply a score.
final double confidence;
/// Alternative transcriptions supplied by the engine, if any.
final List<String> alternatives;
const SpeechRecognitionResult({
required this.recognizedWords,
this.confidence = 0.0,
this.alternatives = const [],
});
/// Creates a [SpeechRecognitionResult] from a platform-channel [map].
///
/// Missing or null entries fall back to defaults: empty text, 0.0
/// confidence, and an empty alternatives list.
factory SpeechRecognitionResult.fromMap(Map<String, dynamic> map) {
  final words = map['recognizedWords'] as String? ?? '';
  final score = (map['confidence'] as num?)?.toDouble() ?? 0.0;
  final alts = List<String>.from(map['alternatives'] as List? ?? []);
  return SpeechRecognitionResult(
    recognizedWords: words,
    confidence: score,
    alternatives: alts,
  );
}
@ -18,23 +28,31 @@ class SpeechRecognitionResult {
/// Serializes this result to a plain map (the inverse of [fromMap]).
Map<String, dynamic> toMap() => <String, dynamic>{
      'recognizedWords': recognizedWords,
      'confidence': confidence,
      'alternatives': alternatives,
    };
@override
String toString() {
return 'SpeechRecognitionResult(recognizedWords: $recognizedWords)';
return 'SpeechRecognitionResult(recognizedWords: $recognizedWords, '
'confidence: $confidence, alternatives: $alternatives)';
}
@override
bool operator ==(Object other) {
if (identical(this, other)) return true;
return other is SpeechRecognitionResult &&
other.recognizedWords == recognizedWords;
other.recognizedWords == recognizedWords &&
other.confidence == confidence &&
other.alternatives.length == alternatives.length &&
other.alternatives.every((alt) => alternatives.contains(alt));
}
@override
int get hashCode {
return recognizedWords.hashCode;
return recognizedWords.hashCode ^
confidence.hashCode ^
alternatives.hashCode;
}
}

View File

@ -221,7 +221,6 @@ class YxAsrService implements SpeechRecognitionService {
bool _isStartingRecording = false; //
bool _isInitialized = false;
String _currentModelPath = '';
String _lastRecognizedText = ''; //
//
RecognitionSpeed _recognitionSpeed = RecognitionSpeed.fast;
@ -514,7 +513,6 @@ class YxAsrService implements SpeechRecognitionService {
await _startAudioRecording(_sampleRate.hz);
_isListening = true;
_lastRecognizedText = ''; //
_statusController.add(true);
//
@ -742,18 +740,13 @@ class YxAsrService implements SpeechRecognitionService {
final result = _recognizer!.getResult(_stream!);
debugPrint('🔍 [YxAsr] 获取识别结果: "${result.text}"');
//
if (result.text.isNotEmpty &&
partialResults &&
result.text != _lastRecognizedText) {
if (result.text.isNotEmpty && partialResults) {
debugPrint('🎤 [YxAsr] 发送实时识别结果: ${result.text}');
_lastRecognizedText = result.text; //
_sendResult(
recognizedWords: result.text,
confidence: 0.8,
alternatives: [],
);
} else if (result.text.isNotEmpty &&
result.text == _lastRecognizedText) {
debugPrint('🔄 [YxAsr] 跳过重复识别结果: "${result.text}"');
}
//
@ -769,10 +762,14 @@ class YxAsrService implements SpeechRecognitionService {
/// Wraps the raw recognition data in a [SpeechRecognitionResult] and
/// publishes it on [_resultController] for stream listeners.
void _sendResult({
  required String recognizedWords,
  required double confidence,
  required List<String> alternatives,
}) {
  debugPrint('📤 [YxAsr] 发送识别结果: "$recognizedWords"');
  _resultController.add(
    SpeechRecognitionResult(
      recognizedWords: recognizedWords,
      confidence: confidence,
      alternatives: alternatives,
    ),
  );
}