Remove final result logic from SDK library

Major SDK simplification by removing redundant final result processing:

1. YxAsrService changes:
   - Remove final result retrieval in stopListening()
   - Remove finalResult parameter from _sendResult()
   - Simplify stop logic to only reset stream state
   - Eliminate duplicate API calls that provided no additional value

2. SpeechRecognitionResult model changes:
   - Remove finalResult property and related logic
   - Update constructor, factory methods, toString, equals, hashCode
   - Remove finalResult from toMap/fromMap serialization
   - Simplify the model to focus on actual recognition data

3. Benefits:
   - Cleaner, more maintainable codebase
   - Reduced complexity and potential bugs
   - Better performance (no redundant API calls)
   - Simpler API for developers to use
   - Real-time text appending works seamlessly without artificial distinctions

Analysis showed that each 'final result' was identical to the last real-time result,
making the distinction unnecessary. Now all results are treated uniformly as
real-time updates, providing a smoother and more intuitive user experience.
This commit is contained in:
Max 2025-09-09 11:29:19 +08:00
parent ed51fa89bd
commit 0af37c5b87
3 changed files with 27 additions and 60 deletions

View File

@ -475,29 +475,24 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
'📱 [Example] RecordingButton 接收到识别结果: "${result.recognizedWords}"'); '📱 [Example] RecordingButton 接收到识别结果: "${result.recognizedWords}"');
setState(() { setState(() {
if (result.recognizedWords.isNotEmpty) { if (result.recognizedWords.isNotEmpty) {
if (result.finalResult) { //
// base text为下次录音做准备 print('📱 [Example] 实时识别,更新输入框: ${result.recognizedWords}');
print('📱 [Example] 最终识别结果,确认文本: ${result.recognizedWords}'); _currentText = result.recognizedWords;
_baseText = _textController.text; //
_currentText = ''; //
} else {
//
print('📱 [Example] 实时识别,更新输入框: ${result.recognizedWords}');
_currentText = result.recognizedWords;
// = + // = +
String newText = _baseText; String newText = _baseText;
if (newText.isNotEmpty && !newText.endsWith(' ') && _currentText.isNotEmpty) { if (newText.isNotEmpty &&
newText += ' '; // !newText.endsWith(' ') &&
} _currentText.isNotEmpty) {
newText += _currentText; newText += ' '; //
_textController.text = newText;
//
_textController.selection = TextSelection.fromPosition(
TextPosition(offset: newText.length),
);
} }
newText += _currentText;
_textController.text = newText;
//
_textController.selection = TextSelection.fromPosition(
TextPosition(offset: newText.length),
);
} }
}); });
}, },
@ -513,7 +508,7 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
}); });
if (!isListening) { if (!isListening) {
// //
if (_currentText.isNotEmpty) { if (_currentText.isNotEmpty) {
setState(() { setState(() {
_recognitionHistory.insert(0, _currentText); _recognitionHistory.insert(0, _currentText);
@ -523,7 +518,11 @@ class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
} }
}); });
} }
// _currentText //
setState(() {
_baseText = _textController.text; //
_currentText = ''; //
});
} else { } else {
// //
setState(() { setState(() {

View File

@ -9,14 +9,10 @@ class SpeechRecognitionResult {
/// ///
final List<String> alternatives; final List<String> alternatives;
/// true: , false:
final bool finalResult;
const SpeechRecognitionResult({ const SpeechRecognitionResult({
required this.recognizedWords, required this.recognizedWords,
this.confidence = 0.0, this.confidence = 0.0,
this.alternatives = const [], this.alternatives = const [],
this.finalResult = false,
}); });
/// Map [SpeechRecognitionResult] /// Map [SpeechRecognitionResult]
@ -25,7 +21,6 @@ class SpeechRecognitionResult {
recognizedWords: map['recognizedWords'] as String? ?? '', recognizedWords: map['recognizedWords'] as String? ?? '',
confidence: (map['confidence'] as num?)?.toDouble() ?? 0.0, confidence: (map['confidence'] as num?)?.toDouble() ?? 0.0,
alternatives: List<String>.from(map['alternatives'] as List? ?? []), alternatives: List<String>.from(map['alternatives'] as List? ?? []),
finalResult: map['finalResult'] as bool? ?? false,
); );
} }
@ -35,14 +30,13 @@ class SpeechRecognitionResult {
'recognizedWords': recognizedWords, 'recognizedWords': recognizedWords,
'confidence': confidence, 'confidence': confidence,
'alternatives': alternatives, 'alternatives': alternatives,
'finalResult': finalResult,
}; };
} }
@override @override
String toString() { String toString() {
return 'SpeechRecognitionResult(recognizedWords: $recognizedWords, ' return 'SpeechRecognitionResult(recognizedWords: $recognizedWords, '
'confidence: $confidence, alternatives: $alternatives, finalResult: $finalResult)'; 'confidence: $confidence, alternatives: $alternatives)';
} }
@override @override
@ -51,7 +45,6 @@ class SpeechRecognitionResult {
return other is SpeechRecognitionResult && return other is SpeechRecognitionResult &&
other.recognizedWords == recognizedWords && other.recognizedWords == recognizedWords &&
other.confidence == confidence && other.confidence == confidence &&
other.finalResult == finalResult &&
other.alternatives.length == alternatives.length && other.alternatives.length == alternatives.length &&
other.alternatives.every((alt) => alternatives.contains(alt)); other.alternatives.every((alt) => alternatives.contains(alt));
} }
@ -60,7 +53,6 @@ class SpeechRecognitionResult {
int get hashCode { int get hashCode {
return recognizedWords.hashCode ^ return recognizedWords.hashCode ^
confidence.hashCode ^ confidence.hashCode ^
finalResult.hashCode ^
alternatives.hashCode; alternatives.hashCode;
} }
} }

View File

@ -572,30 +572,8 @@ class YxAsrService implements SpeechRecognitionService {
// //
await _stopAudioRecording(); await _stopAudioRecording();
// //
if (_recognizer != null && _stream != null) { if (_stream != null) {
try {
// 线使
debugPrint('🛑 [YxAsr] 获取流式识别最终结果...');
final result = _recognizer!.getResult(_stream!);
debugPrint('🛑 [YxAsr] 流式最终结果: "${result.text}"');
if (result.text.isNotEmpty) {
debugPrint('📤 [YxAsr] 发送流式最终结果: ${result.text}');
_sendResult(
recognizedWords: result.text,
confidence: 1.0,
alternatives: [],
finalResult: true, //
);
} else {
debugPrint('⚠️ [YxAsr] 流式最终结果为空');
}
} catch (e) {
debugPrint('⚠️ [YxAsr] 获取最终结果时出错: $e');
}
//
_stream = null; _stream = null;
} }
@ -786,14 +764,12 @@ class YxAsrService implements SpeechRecognitionService {
required String recognizedWords, required String recognizedWords,
required double confidence, required double confidence,
required List<String> alternatives, required List<String> alternatives,
bool finalResult = false,
}) { }) {
debugPrint('📤 [YxAsr] 发送识别结果: "$recognizedWords"'); debugPrint('📤 [YxAsr] 发送识别结果: "$recognizedWords"');
final result = SpeechRecognitionResult( final result = SpeechRecognitionResult(
recognizedWords: recognizedWords, recognizedWords: recognizedWords,
confidence: confidence, confidence: confidence,
alternatives: alternatives, alternatives: alternatives,
finalResult: finalResult,
); );
_resultController.add(result); _resultController.add(result);
} }