Initial commit: Flutter speech-to-text plugin with Sherpa-ONNX integration

This commit is contained in:
Max 2025-08-27 17:09:36 +08:00
commit 5c9a8ea6e6
190 changed files with 27947 additions and 0 deletions

View File

@ -0,0 +1,31 @@
Extension Discovery Cache
=========================
This folder is used by `package:extension_discovery` to cache lists of
packages that contain extensions for other packages.
DO NOT USE THIS FOLDER
----------------------
* Do not read (or rely on) the contents of this folder.
* Do not write to this folder.
If you're interested in the lists of extensions stored in this folder use the
API offered by package `extension_discovery` to get this information.
If this package doesn't work for your use-case, then don't try to read the
contents of this folder. It may change, and will not remain stable.
Use package `extension_discovery`
---------------------------------
If you want to access information from this folder.
Feel free to delete this folder
-------------------------------
Files in this folder act as a cache, and the cache is discarded if the files
are older than the modification time of `.dart_tool/package_config.json`.
Hence, it should never be necessary to clear this cache manually; if you find a
need to do so, please file a bug.

View File

@ -0,0 +1 @@
{"version":2,"entries":[{"package":"yx_asr","rootUri":"../","packageUri":"lib/"}]}

View File

@ -0,0 +1 @@
{"version":2,"entries":[{"package":"yx_asr","rootUri":"../","packageUri":"lib/"}]}

View File

@ -0,0 +1,32 @@
//
// Generated file. Do not edit.
// This file is generated from template in file `flutter_tools/lib/src/flutter_plugins.dart`.
//
// @dart = 3.0
import 'dart:io'; // flutter_ignore: dart_io_import.
import 'package:record_linux/record_linux.dart';
/// Registrant for Dart-implemented plugin packages.
///
/// `vm:entry-point` keeps this class (and [register]) from being tree-shaken;
/// presumably it is invoked by the Flutter-tool-generated entrypoint at
/// startup — confirm against the generated `main` wrapper.
@pragma('vm:entry-point')
class _PluginRegistrant {
  /// Registers Dart-side plugin implementations for the current platform.
  ///
  /// Only `record_linux` has a Dart registration in this project; the
  /// branches for the other platforms are intentionally empty in this
  /// generated file.
  @pragma('vm:entry-point')
  static void register() {
    if (Platform.isAndroid) {
    } else if (Platform.isIOS) {
    } else if (Platform.isLinux) {
      try {
        RecordLinux.registerWith();
      } catch (err) {
        // Registration failure is treated as non-fatal: log and continue so
        // the rest of the app can still start without this plugin.
        print(
          '`record_linux` threw an error: $err. '
          'The app may not function as expected until you remove this plugin from pubspec.yaml'
        );
      }
    } else if (Platform.isMacOS) {
    } else if (Platform.isWindows) {
    }
  }
}

View File

@ -0,0 +1,436 @@
{
"configVersion": 2,
"packages": [
{
"name": "async",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/async-2.13.0",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "boolean_selector",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/boolean_selector-2.1.2",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "characters",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/characters-1.4.0",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "clock",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/clock-1.1.2",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "collection",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/collection-1.19.1",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "crypto",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/crypto-3.0.6",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "fake_async",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/fake_async-1.3.3",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "ffi",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/ffi-2.1.4",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "file",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/file-7.0.1",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "fixnum",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/fixnum-1.1.1",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "flutter",
"rootUri": "file:///Users/max/fvm/versions/3.32.0/packages/flutter",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "flutter_driver",
"rootUri": "file:///Users/max/fvm/versions/3.32.0/packages/flutter_driver",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "flutter_lints",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/flutter_lints-3.0.2",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "flutter_test",
"rootUri": "file:///Users/max/fvm/versions/3.32.0/packages/flutter_test",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "flutter_web_plugins",
"rootUri": "file:///Users/max/fvm/versions/3.32.0/packages/flutter_web_plugins",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "fuchsia_remote_debug_protocol",
"rootUri": "file:///Users/max/fvm/versions/3.32.0/packages/fuchsia_remote_debug_protocol",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "integration_test",
"rootUri": "file:///Users/max/fvm/versions/3.32.0/packages/integration_test",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "leak_tracker",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker-10.0.9",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "leak_tracker_flutter_testing",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker_flutter_testing-3.0.9",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "leak_tracker_testing",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker_testing-3.0.1",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "lints",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/lints-3.0.0",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "matcher",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/matcher-0.12.17",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "material_color_utilities",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/material_color_utilities-0.11.1",
"packageUri": "lib/",
"languageVersion": "2.17"
},
{
"name": "meta",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/meta-1.16.0",
"packageUri": "lib/",
"languageVersion": "2.12"
},
{
"name": "path",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path-1.9.1",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "path_provider",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider-2.1.5",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "path_provider_android",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_android-2.2.17",
"packageUri": "lib/",
"languageVersion": "3.6"
},
{
"name": "path_provider_foundation",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_foundation-2.4.2",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "path_provider_linux",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_linux-2.2.1",
"packageUri": "lib/",
"languageVersion": "2.19"
},
{
"name": "path_provider_platform_interface",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_platform_interface-2.1.2",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "path_provider_windows",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_windows-2.3.0",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "permission_handler",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler-12.0.1",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "permission_handler_android",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_android-13.0.1",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "permission_handler_apple",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_apple-9.4.7",
"packageUri": "lib/",
"languageVersion": "2.18"
},
{
"name": "permission_handler_html",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_html-0.1.3+5",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "permission_handler_platform_interface",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_platform_interface-4.3.0",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "permission_handler_windows",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_windows-0.2.1",
"packageUri": "lib/",
"languageVersion": "2.12"
},
{
"name": "platform",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/platform-3.1.6",
"packageUri": "lib/",
"languageVersion": "3.2"
},
{
"name": "plugin_platform_interface",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/plugin_platform_interface-2.1.8",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "process",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/process-5.0.3",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "record",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record-6.1.1",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "record_android",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_android-1.4.1",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "record_ios",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_ios-1.1.2",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "record_linux",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_linux-1.2.1",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "record_macos",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_macos-1.1.1",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "record_platform_interface",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_platform_interface-1.4.0",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "record_web",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_web-1.2.0",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "record_windows",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_windows-1.0.7",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "sherpa_onnx",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx-1.12.10",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "sherpa_onnx_android",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_android-1.12.10",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "sherpa_onnx_ios",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_ios-1.12.10",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "sherpa_onnx_linux",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_linux-1.12.10",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "sherpa_onnx_macos",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_macos-1.12.10",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "sherpa_onnx_windows",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_windows-1.12.10",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "sky_engine",
"rootUri": "file:///Users/max/fvm/versions/3.32.0/bin/cache/pkg/sky_engine",
"packageUri": "lib/",
"languageVersion": "3.7"
},
{
"name": "source_span",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/source_span-1.10.1",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "sprintf",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sprintf-7.0.0",
"packageUri": "lib/",
"languageVersion": "2.12"
},
{
"name": "stack_trace",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/stack_trace-1.12.1",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "stream_channel",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/stream_channel-2.1.4",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "string_scanner",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/string_scanner-1.4.1",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "sync_http",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sync_http-0.3.1",
"packageUri": "lib/",
"languageVersion": "2.12"
},
{
"name": "term_glyph",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/term_glyph-1.2.2",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "test_api",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/test_api-0.7.4",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "typed_data",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/typed_data-1.4.0",
"packageUri": "lib/",
"languageVersion": "3.5"
},
{
"name": "uuid",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/uuid-4.5.1",
"packageUri": "lib/",
"languageVersion": "3.0"
},
{
"name": "vector_math",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/vector_math-2.1.4",
"packageUri": "lib/",
"languageVersion": "2.14"
},
{
"name": "vm_service",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/vm_service-15.0.0",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "web",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/web-1.1.1",
"packageUri": "lib/",
"languageVersion": "3.4"
},
{
"name": "webdriver",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/webdriver-3.1.0",
"packageUri": "lib/",
"languageVersion": "3.1"
},
{
"name": "xdg_directories",
"rootUri": "file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/xdg_directories-1.1.0",
"packageUri": "lib/",
"languageVersion": "3.3"
},
{
"name": "yx_asr",
"rootUri": "../",
"packageUri": "lib/",
"languageVersion": "3.0"
}
],
"generator": "pub",
"generatorVersion": "3.8.0",
"flutterRoot": "file:///Users/max/fvm/versions/3.32.0",
"flutterVersion": "3.32.0",
"pubCache": "file:///Users/max/.pub-cache"
}

View File

@ -0,0 +1,285 @@
async
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/async-2.13.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/async-2.13.0/lib/
boolean_selector
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/boolean_selector-2.1.2/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/boolean_selector-2.1.2/lib/
characters
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/characters-1.4.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/characters-1.4.0/lib/
clock
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/clock-1.1.2/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/clock-1.1.2/lib/
collection
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/collection-1.19.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/collection-1.19.1/lib/
crypto
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/crypto-3.0.6/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/crypto-3.0.6/lib/
fake_async
3.3
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/fake_async-1.3.3/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/fake_async-1.3.3/lib/
ffi
3.7
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/ffi-2.1.4/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/ffi-2.1.4/lib/
file
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/file-7.0.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/file-7.0.1/lib/
fixnum
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/fixnum-1.1.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/fixnum-1.1.1/lib/
flutter_lints
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/flutter_lints-3.0.2/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/flutter_lints-3.0.2/lib/
leak_tracker
3.2
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker-10.0.9/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker-10.0.9/lib/
leak_tracker_flutter_testing
3.2
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker_flutter_testing-3.0.9/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker_flutter_testing-3.0.9/lib/
leak_tracker_testing
3.2
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker_testing-3.0.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/leak_tracker_testing-3.0.1/lib/
lints
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/lints-3.0.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/lints-3.0.0/lib/
matcher
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/matcher-0.12.17/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/matcher-0.12.17/lib/
material_color_utilities
2.17
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/material_color_utilities-0.11.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/material_color_utilities-0.11.1/lib/
meta
2.12
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/meta-1.16.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/meta-1.16.0/lib/
path
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path-1.9.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path-1.9.1/lib/
path_provider
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider-2.1.5/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider-2.1.5/lib/
path_provider_android
3.6
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_android-2.2.17/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_android-2.2.17/lib/
path_provider_foundation
3.7
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_foundation-2.4.2/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_foundation-2.4.2/lib/
path_provider_linux
2.19
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_linux-2.2.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_linux-2.2.1/lib/
path_provider_platform_interface
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_platform_interface-2.1.2/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_platform_interface-2.1.2/lib/
path_provider_windows
3.2
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_windows-2.3.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/path_provider_windows-2.3.0/lib/
permission_handler
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler-12.0.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler-12.0.1/lib/
permission_handler_android
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_android-13.0.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_android-13.0.1/lib/
permission_handler_apple
2.18
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_apple-9.4.7/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_apple-9.4.7/lib/
permission_handler_html
3.3
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_html-0.1.3+5/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_html-0.1.3+5/lib/
permission_handler_platform_interface
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_platform_interface-4.3.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_platform_interface-4.3.0/lib/
permission_handler_windows
2.12
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_windows-0.2.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/permission_handler_windows-0.2.1/lib/
platform
3.2
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/platform-3.1.6/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/platform-3.1.6/lib/
plugin_platform_interface
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/plugin_platform_interface-2.1.8/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/plugin_platform_interface-2.1.8/lib/
process
3.3
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/process-5.0.3/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/process-5.0.3/lib/
record
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record-6.1.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record-6.1.1/lib/
record_android
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_android-1.4.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_android-1.4.1/lib/
record_ios
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_ios-1.1.2/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_ios-1.1.2/lib/
record_linux
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_linux-1.2.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_linux-1.2.1/lib/
record_macos
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_macos-1.1.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_macos-1.1.1/lib/
record_platform_interface
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_platform_interface-1.4.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_platform_interface-1.4.0/lib/
record_web
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_web-1.2.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_web-1.2.0/lib/
record_windows
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_windows-1.0.7/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/record_windows-1.0.7/lib/
sherpa_onnx
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx-1.12.10/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx-1.12.10/lib/
sherpa_onnx_android
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_android-1.12.10/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_android-1.12.10/lib/
sherpa_onnx_ios
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_ios-1.12.10/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_ios-1.12.10/lib/
sherpa_onnx_linux
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_linux-1.12.10/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_linux-1.12.10/lib/
sherpa_onnx_macos
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_macos-1.12.10/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_macos-1.12.10/lib/
sherpa_onnx_windows
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_windows-1.12.10/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sherpa_onnx_windows-1.12.10/lib/
source_span
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/source_span-1.10.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/source_span-1.10.1/lib/
sprintf
2.12
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sprintf-7.0.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sprintf-7.0.0/lib/
stack_trace
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/stack_trace-1.12.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/stack_trace-1.12.1/lib/
stream_channel
3.3
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/stream_channel-2.1.4/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/stream_channel-2.1.4/lib/
string_scanner
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/string_scanner-1.4.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/string_scanner-1.4.1/lib/
sync_http
2.12
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sync_http-0.3.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/sync_http-0.3.1/lib/
term_glyph
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/term_glyph-1.2.2/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/term_glyph-1.2.2/lib/
test_api
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/test_api-0.7.4/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/test_api-0.7.4/lib/
typed_data
3.5
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/typed_data-1.4.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/typed_data-1.4.0/lib/
uuid
3.0
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/uuid-4.5.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/uuid-4.5.1/lib/
vector_math
2.14
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/vector_math-2.1.4/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/vector_math-2.1.4/lib/
vm_service
3.3
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/vm_service-15.0.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/vm_service-15.0.0/lib/
web
3.4
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/web-1.1.1/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/web-1.1.1/lib/
webdriver
3.1
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/webdriver-3.1.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/webdriver-3.1.0/lib/
xdg_directories
3.3
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/xdg_directories-1.1.0/
file:///Users/max/.pub-cache/hosted/pub.flutter-io.cn/xdg_directories-1.1.0/lib/
yx_asr
3.0
file:///Users/max/SourceCode/yuanxuan/yx_asr/
file:///Users/max/SourceCode/yuanxuan/yx_asr/lib/
sky_engine
3.7
file:///Users/max/fvm/versions/3.32.0/bin/cache/pkg/sky_engine/
file:///Users/max/fvm/versions/3.32.0/bin/cache/pkg/sky_engine/lib/
flutter
3.7
file:///Users/max/fvm/versions/3.32.0/packages/flutter/
file:///Users/max/fvm/versions/3.32.0/packages/flutter/lib/
flutter_driver
3.7
file:///Users/max/fvm/versions/3.32.0/packages/flutter_driver/
file:///Users/max/fvm/versions/3.32.0/packages/flutter_driver/lib/
flutter_test
3.7
file:///Users/max/fvm/versions/3.32.0/packages/flutter_test/
file:///Users/max/fvm/versions/3.32.0/packages/flutter_test/lib/
flutter_web_plugins
3.7
file:///Users/max/fvm/versions/3.32.0/packages/flutter_web_plugins/
file:///Users/max/fvm/versions/3.32.0/packages/flutter_web_plugins/lib/
fuchsia_remote_debug_protocol
3.7
file:///Users/max/fvm/versions/3.32.0/packages/fuchsia_remote_debug_protocol/
file:///Users/max/fvm/versions/3.32.0/packages/fuchsia_remote_debug_protocol/lib/
integration_test
3.7
file:///Users/max/fvm/versions/3.32.0/packages/integration_test/
file:///Users/max/fvm/versions/3.32.0/packages/integration_test/lib/
2

View File

@ -0,0 +1,662 @@
{
"roots": [
"yx_asr"
],
"packages": [
{
"name": "yx_asr",
"version": "1.0.0",
"dependencies": [
"flutter",
"path",
"path_provider",
"permission_handler",
"record",
"sherpa_onnx"
],
"devDependencies": [
"flutter_lints",
"flutter_test",
"integration_test"
]
},
{
"name": "flutter_lints",
"version": "3.0.2",
"dependencies": [
"lints"
]
},
{
"name": "integration_test",
"version": "0.0.0",
"dependencies": [
"async",
"boolean_selector",
"characters",
"clock",
"collection",
"fake_async",
"file",
"flutter",
"flutter_driver",
"flutter_test",
"leak_tracker",
"leak_tracker_flutter_testing",
"leak_tracker_testing",
"matcher",
"material_color_utilities",
"meta",
"path",
"source_span",
"stack_trace",
"stream_channel",
"string_scanner",
"sync_http",
"term_glyph",
"test_api",
"vector_math",
"vm_service",
"webdriver"
]
},
{
"name": "flutter_test",
"version": "0.0.0",
"dependencies": [
"async",
"boolean_selector",
"characters",
"clock",
"collection",
"fake_async",
"flutter",
"leak_tracker",
"leak_tracker_flutter_testing",
"leak_tracker_testing",
"matcher",
"material_color_utilities",
"meta",
"path",
"source_span",
"stack_trace",
"stream_channel",
"string_scanner",
"term_glyph",
"test_api",
"vector_math",
"vm_service"
]
},
{
"name": "record",
"version": "6.1.1",
"dependencies": [
"flutter",
"record_android",
"record_ios",
"record_linux",
"record_macos",
"record_platform_interface",
"record_web",
"record_windows",
"uuid"
]
},
{
"name": "permission_handler",
"version": "12.0.1",
"dependencies": [
"flutter",
"meta",
"permission_handler_android",
"permission_handler_apple",
"permission_handler_html",
"permission_handler_platform_interface",
"permission_handler_windows"
]
},
{
"name": "path",
"version": "1.9.1",
"dependencies": []
},
{
"name": "sherpa_onnx",
"version": "1.12.10",
"dependencies": [
"ffi",
"flutter",
"sherpa_onnx_android",
"sherpa_onnx_ios",
"sherpa_onnx_linux",
"sherpa_onnx_macos",
"sherpa_onnx_windows"
]
},
{
"name": "flutter",
"version": "0.0.0",
"dependencies": [
"characters",
"collection",
"material_color_utilities",
"meta",
"sky_engine",
"vector_math"
]
},
{
"name": "lints",
"version": "3.0.0",
"dependencies": []
},
{
"name": "webdriver",
"version": "3.1.0",
"dependencies": [
"matcher",
"path",
"stack_trace",
"sync_http"
]
},
{
"name": "vector_math",
"version": "2.1.4",
"dependencies": []
},
{
"name": "test_api",
"version": "0.7.4",
"dependencies": [
"async",
"boolean_selector",
"collection",
"meta",
"source_span",
"stack_trace",
"stream_channel",
"string_scanner",
"term_glyph"
]
},
{
"name": "term_glyph",
"version": "1.2.2",
"dependencies": []
},
{
"name": "sync_http",
"version": "0.3.1",
"dependencies": []
},
{
"name": "string_scanner",
"version": "1.4.1",
"dependencies": [
"source_span"
]
},
{
"name": "stream_channel",
"version": "2.1.4",
"dependencies": [
"async"
]
},
{
"name": "stack_trace",
"version": "1.12.1",
"dependencies": [
"path"
]
},
{
"name": "source_span",
"version": "1.10.1",
"dependencies": [
"collection",
"path",
"term_glyph"
]
},
{
"name": "meta",
"version": "1.16.0",
"dependencies": []
},
{
"name": "material_color_utilities",
"version": "0.11.1",
"dependencies": [
"collection"
]
},
{
"name": "matcher",
"version": "0.12.17",
"dependencies": [
"async",
"meta",
"stack_trace",
"term_glyph",
"test_api"
]
},
{
"name": "leak_tracker_testing",
"version": "3.0.1",
"dependencies": [
"leak_tracker",
"matcher",
"meta"
]
},
{
"name": "leak_tracker_flutter_testing",
"version": "3.0.9",
"dependencies": [
"flutter",
"leak_tracker",
"leak_tracker_testing",
"matcher",
"meta"
]
},
{
"name": "leak_tracker",
"version": "10.0.9",
"dependencies": [
"clock",
"collection",
"meta",
"path",
"vm_service"
]
},
{
"name": "file",
"version": "7.0.1",
"dependencies": [
"meta",
"path"
]
},
{
"name": "fake_async",
"version": "1.3.3",
"dependencies": [
"clock",
"collection"
]
},
{
"name": "collection",
"version": "1.19.1",
"dependencies": []
},
{
"name": "clock",
"version": "1.1.2",
"dependencies": []
},
{
"name": "characters",
"version": "1.4.0",
"dependencies": []
},
{
"name": "boolean_selector",
"version": "2.1.2",
"dependencies": [
"source_span",
"string_scanner"
]
},
{
"name": "async",
"version": "2.13.0",
"dependencies": [
"collection",
"meta"
]
},
{
"name": "vm_service",
"version": "15.0.0",
"dependencies": []
},
{
"name": "flutter_driver",
"version": "0.0.0",
"dependencies": [
"async",
"boolean_selector",
"characters",
"clock",
"collection",
"file",
"flutter",
"flutter_test",
"fuchsia_remote_debug_protocol",
"leak_tracker",
"leak_tracker_flutter_testing",
"leak_tracker_testing",
"matcher",
"material_color_utilities",
"meta",
"path",
"platform",
"process",
"source_span",
"stack_trace",
"stream_channel",
"string_scanner",
"sync_http",
"term_glyph",
"test_api",
"vector_math",
"vm_service",
"webdriver"
]
},
{
"name": "record_macos",
"version": "1.1.1",
"dependencies": [
"flutter",
"record_platform_interface"
]
},
{
"name": "record_ios",
"version": "1.1.2",
"dependencies": [
"flutter",
"record_platform_interface"
]
},
{
"name": "record_android",
"version": "1.4.1",
"dependencies": [
"flutter",
"record_platform_interface"
]
},
{
"name": "record_linux",
"version": "1.2.1",
"dependencies": [
"flutter",
"record_platform_interface"
]
},
{
"name": "record_windows",
"version": "1.0.7",
"dependencies": [
"flutter",
"record_platform_interface"
]
},
{
"name": "record_web",
"version": "1.2.0",
"dependencies": [
"flutter",
"flutter_web_plugins",
"record_platform_interface",
"web"
]
},
{
"name": "record_platform_interface",
"version": "1.4.0",
"dependencies": [
"flutter",
"plugin_platform_interface"
]
},
{
"name": "uuid",
"version": "4.5.1",
"dependencies": [
"crypto",
"fixnum",
"meta",
"sprintf"
]
},
{
"name": "permission_handler_platform_interface",
"version": "4.3.0",
"dependencies": [
"flutter",
"meta",
"plugin_platform_interface"
]
},
{
"name": "permission_handler_windows",
"version": "0.2.1",
"dependencies": [
"flutter",
"permission_handler_platform_interface"
]
},
{
"name": "permission_handler_html",
"version": "0.1.3+5",
"dependencies": [
"flutter",
"flutter_web_plugins",
"permission_handler_platform_interface",
"web"
]
},
{
"name": "permission_handler_apple",
"version": "9.4.7",
"dependencies": [
"flutter",
"permission_handler_platform_interface"
]
},
{
"name": "permission_handler_android",
"version": "13.0.1",
"dependencies": [
"flutter",
"permission_handler_platform_interface"
]
},
{
"name": "sherpa_onnx_ios",
"version": "1.12.10",
"dependencies": [
"flutter"
]
},
{
"name": "sherpa_onnx_windows",
"version": "1.12.10",
"dependencies": [
"flutter"
]
},
{
"name": "sherpa_onnx_linux",
"version": "1.12.10",
"dependencies": [
"flutter"
]
},
{
"name": "sherpa_onnx_macos",
"version": "1.12.10",
"dependencies": [
"flutter"
]
},
{
"name": "sherpa_onnx_android",
"version": "1.12.10",
"dependencies": [
"flutter"
]
},
{
"name": "ffi",
"version": "2.1.4",
"dependencies": []
},
{
"name": "sky_engine",
"version": "0.0.0",
"dependencies": []
},
{
"name": "process",
"version": "5.0.3",
"dependencies": [
"file",
"path",
"platform"
]
},
{
"name": "platform",
"version": "3.1.6",
"dependencies": []
},
{
"name": "fuchsia_remote_debug_protocol",
"version": "0.0.0",
"dependencies": [
"file",
"meta",
"path",
"platform",
"process",
"vm_service"
]
},
{
"name": "web",
"version": "1.1.1",
"dependencies": []
},
{
"name": "flutter_web_plugins",
"version": "0.0.0",
"dependencies": [
"characters",
"collection",
"flutter",
"material_color_utilities",
"meta",
"vector_math"
]
},
{
"name": "plugin_platform_interface",
"version": "2.1.8",
"dependencies": [
"meta"
]
},
{
"name": "fixnum",
"version": "1.1.1",
"dependencies": []
},
{
"name": "sprintf",
"version": "7.0.0",
"dependencies": []
},
{
"name": "crypto",
"version": "3.0.6",
"dependencies": [
"typed_data"
]
},
{
"name": "typed_data",
"version": "1.4.0",
"dependencies": [
"collection"
]
},
{
"name": "path_provider",
"version": "2.1.5",
"dependencies": [
"flutter",
"path_provider_android",
"path_provider_foundation",
"path_provider_linux",
"path_provider_platform_interface",
"path_provider_windows"
]
},
{
"name": "path_provider_linux",
"version": "2.2.1",
"dependencies": [
"ffi",
"flutter",
"path",
"path_provider_platform_interface",
"xdg_directories"
]
},
{
"name": "path_provider_windows",
"version": "2.3.0",
"dependencies": [
"ffi",
"flutter",
"path",
"path_provider_platform_interface"
]
},
{
"name": "path_provider_platform_interface",
"version": "2.1.2",
"dependencies": [
"flutter",
"platform",
"plugin_platform_interface"
]
},
{
"name": "path_provider_foundation",
"version": "2.4.2",
"dependencies": [
"flutter",
"path_provider_platform_interface"
]
},
{
"name": "xdg_directories",
"version": "1.1.0",
"dependencies": [
"meta",
"path"
]
},
{
"name": "path_provider_android",
"version": "2.2.17",
"dependencies": [
"flutter",
"path_provider_platform_interface"
]
}
],
"configVersion": 1
}

1
.dart_tool/version Normal file
View File

@ -0,0 +1 @@
3.32.0

File diff suppressed because one or more lines are too long

46
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,46 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "yx_asr",
"request": "launch",
"type": "dart"
},
{
"name": "yx_asr (profile mode)",
"request": "launch",
"type": "dart",
"flutterMode": "profile"
},
{
"name": "yx_asr (release mode)",
"request": "launch",
"type": "dart",
"flutterMode": "release"
},
{
"name": "example",
"cwd": "example",
"request": "launch",
"type": "dart"
},
{
"name": "example (profile mode)",
"cwd": "example",
"request": "launch",
"type": "dart",
"flutterMode": "profile"
},
{
"name": "example (release mode)",
"cwd": "example",
"request": "launch",
"type": "dart",
"flutterMode": "release"
}
]
}

44
CHANGELOG.md Normal file
View File

@ -0,0 +1,44 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.0.0] - 2025-08-26
### Added
- Initial release of YX ASR Flutter plugin
- Real-time speech-to-text recognition for iOS and Android
- Support for multiple languages and locales
- Customizable RecordingButton widget with visual feedback
- Comprehensive error handling and permission management
- Stream-based API for results, errors, and status changes
- On-device recognition support for iOS (iOS 13+)
- Partial results support for real-time transcription
- Example app demonstrating all features
- Comprehensive documentation and API reference
### Features
- Cross-platform support (iOS 11+ and Android API 21+)
- Minimal third-party dependencies
- Proper lifecycle management
- Permission handling for microphone access
- Multiple recognition alternatives
- Confidence scoring for recognition results
- Cancellation support
- Audio session management
### Platform Support
- **iOS**: Uses Speech framework with AVAudioEngine
- **Android**: Uses SpeechRecognizer API with proper lifecycle management
### Supported Languages
- English (en-US, en-GB)
- Chinese (zh-CN, zh-TW)
- Japanese (ja-JP)
- Korean (ko-KR)
- Spanish (es-ES)
- French (fr-FR)
- German (de-DE)
- Italian (it-IT)

21
LICENSE Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Yuanxuan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

331
README.md Normal file
View File

@ -0,0 +1,331 @@
# YX ASR - Flutter Speech-to-Text Plugin
基于 sherpa_onnx 的 Flutter 语音识别插件,提供完全离线的实时语音转文字功能。
## 特性
- 🎤 **实时语音识别**: 边说边转换的实时转录功能
- 🔄 **切换录音**: 简单的开始/停止录音,带有视觉反馈
- 🌍 **多语言支持**: 支持中文、英文等多种语言
- 📱 **跨平台**: 支持 iOS 和 Android 平台
- 🎛️ **自定义UI**: 灵活的录音按钮组件,支持自定义外观
- 🔒 **权限管理**: 自动处理麦克风权限申请
- ⚡ **完全离线**: 基于 sherpa_onnx无需网络连接
- 🎯 **高精度识别**: 使用先进的神经网络模型
- 🚀 **低延迟**: 实时处理,响应迅速
- 🔐 **隐私保护**: 语音数据不会上传到云端
## 安装
在您的 `pubspec.yaml` 文件中添加依赖:
```yaml
dependencies:
yx_asr: ^1.0.0
```
然后运行:
```bash
flutter pub get
```
## 模型文件准备
由于使用 sherpa_onnx您需要下载对应的模型文件
1. **中文模型** (推荐)
- 下载地址: https://github.com/k2-fsa/sherpa-onnx/releases/
- 模型名称: `sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20`
- 解压到: `assets/models/zh-cn/`
2. **英文模型**
- 模型名称: `sherpa-onnx-streaming-zipformer-en-2023-02-21`
- 解压到: `assets/models/en-us/`
3. **模型文件结构**
```
assets/models/
├── zh-cn/
│ ├── encoder.onnx
│ ├── decoder.onnx
│ ├── joiner.onnx
│ └── tokens.txt
└── en-us/
├── encoder.onnx
├── decoder.onnx
├── joiner.onnx
└── tokens.txt
```
## 平台配置
### Android
`android/app/src/main/AndroidManifest.xml` 中添加权限:
```xml
<uses-permission android:name="android.permission.RECORD_AUDIO" />
```
### iOS
`ios/Runner/Info.plist` 中添加权限:
```xml
<key>NSMicrophoneUsageDescription</key>
<string>此应用需要麦克风权限来录制您的语音进行识别</string>
```
注意:由于使用 sherpa_onnx 进行离线识别,不需要网络权限和语音识别权限。
## 快速开始
### 基本使用
```dart
import 'package:yx_asr/yx_asr.dart';
class MyApp extends StatefulWidget {
@override
_MyAppState createState() => _MyAppState();
}
class _MyAppState extends State<MyApp> {
final YxAsr _speechToText = YxAsr();
String _recognizedText = '';
bool _isListening = false;
@override
void initState() {
super.initState();
_initializeSpeechToText();
}
Future<void> _initializeSpeechToText() async {
// 使用中文模型初始化
bool initialized = await _speechToText.initializeWithModel('assets/models/zh-cn');
if (initialized) {
// 监听识别结果
_speechToText.onResult.listen((result) {
setState(() {
_recognizedText = result.recognizedWords;
});
});
// 监听错误
_speechToText.onError.listen((error) {
print('语音识别错误: ${error.errorMsg}');
});
// 监听状态变化
_speechToText.onListeningStatusChanged.listen((isListening) {
setState(() {
_isListening = isListening;
});
});
}
}
Future<void> _toggleRecording() async {
if (_isListening) {
await _speechToText.stopListening();
} else {
await _speechToText.startListening(
partialResults: true, // 启用部分结果
);
}
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(title: Text('语音识别')),
body: Column(
children: [
Text('识别结果: $_recognizedText'),
ElevatedButton(
onPressed: _toggleRecording,
child: Text(_isListening ? '停止' : '开始'),
),
],
),
);
}
}
```
### Using the Recording Button Widget
The plugin includes a customizable `RecordingButton` widget:
```dart
import 'package:yx_asr/yx_asr.dart';
RecordingButton(
onResult: (result) {
print('Result: ${result.recognizedWords}');
},
onError: (error) {
print('Error: ${error.errorMsg}');
},
onListeningStatusChanged: (isListening) {
print('Listening: $isListening');
},
localeId: 'en-US',
partialResults: true,
size: 80.0,
tooltip: 'Tap to record',
)
```
## API Reference
### YxAsr Class
#### Methods
- `Future<bool> initialize()` - Initialize the speech recognition service
- `Future<bool> isAvailable()` - Check if speech recognition is available
- `Future<bool> hasPermission()` - Check if microphone permission is granted
- `Future<bool> requestPermission()` - Request microphone permission
- `Future<void> startListening({String localeId, bool partialResults, bool onDevice})` - Start listening
- `Future<void> stopListening()` - Stop listening and get final result
- `Future<void> cancel()` - Cancel current recognition session
- `Future<bool> get isListening` - Check if currently listening
#### Streams
- `Stream<SpeechRecognitionResult> onResult` - Stream of recognition results
- `Stream<SpeechRecognitionError> onError` - Stream of recognition errors
- `Stream<bool> onListeningStatusChanged` - Stream of listening status changes
### SpeechRecognitionResult
```dart
class SpeechRecognitionResult {
final String recognizedWords; // The recognized text
final bool finalResult; // Whether this is a final result
final double confidence; // Confidence level (0.0 to 1.0)
final List<String> alternatives; // Alternative recognition results
}
```
### SpeechRecognitionError
```dart
class SpeechRecognitionError {
final SpeechRecognitionErrorType errorType; // Type of error
final String errorMsg; // Human-readable error message
final String? errorCode; // Platform-specific error code
}
```
### RecordingButton Widget
#### Properties
- `onResult` - Callback for recognition results
- `onError` - Callback for recognition errors
- `onListeningStatusChanged` - Callback for status changes
- `localeId` - Language locale (default: 'en-US')
- `partialResults` - Enable partial results (default: true)
- `onDevice` - Use on-device recognition on iOS (default: false)
- `size` - Button size (default: 80.0)
- `idleColor` - Button color when not recording
- `recordingColor` - Button color when recording
- `disabledColor` - Button color when disabled
- `enabled` - Whether the button is enabled (default: true)
- `tooltip` - Tooltip text
## Supported Languages
The plugin supports multiple languages including:
- English (en-US, en-GB)
- Chinese (zh-CN, zh-TW)
- Japanese (ja-JP)
- Korean (ko-KR)
- Spanish (es-ES)
- French (fr-FR)
- German (de-DE)
- Italian (it-IT)
## Error Handling
The plugin provides comprehensive error handling through the `SpeechRecognitionError` class:
```dart
_speechToText.onError.listen((error) {
switch (error.errorType) {
case SpeechRecognitionErrorType.permissionDenied:
// Handle permission denied
break;
case SpeechRecognitionErrorType.network:
// Handle network errors
break;
case SpeechRecognitionErrorType.noSpeech:
// Handle no speech detected
break;
// ... handle other error types
}
});
```
## Best Practices
1. **Always check permissions** before starting recognition
2. **Handle errors gracefully** to provide good user experience
3. **Use partial results** for real-time feedback
4. **Stop listening** when done to conserve battery
5. **Test on real devices** as speech recognition doesn't work well on simulators
## Example App
Check out the `example/` directory for a comprehensive example app that demonstrates:
- Real-time speech recognition
- Multiple language support
- Error handling
- Recognition history
- Customizable settings
## Contributing
Contributions are welcome! Please read our contributing guidelines and submit pull requests to our repository.
## License
This project is licensed under the MIT License - see the LICENSE file for details.
## Troubleshooting
### Common Issues
1. **Permission Denied Error**
- Ensure microphone permissions are added to platform manifests
- Call `requestPermission()` before starting recognition
2. **Speech Recognition Not Available**
   - Check if device supports speech recognition with `isAvailable()`
   - Verify the sherpa_onnx model files are bundled and the path passed to `initializeWithModel()` is correct (no Google services are required for offline recognition)
3. **No Speech Detected**
- Check microphone hardware
- Ensure app has microphone permission
- Try speaking louder or closer to the microphone
4. **Network Errors**
   - Recognition itself runs fully offline via sherpa_onnx and does not require an internet connection
   - A network connection is only needed if your app downloads model files at runtime
### Testing
- Speech recognition doesn't work well on simulators/emulators
- Always test on real devices
- Test in quiet environments for better accuracy
## Support
For issues and feature requests, please use the GitHub issue tracker.

302
SHERPA_ONNX_USAGE.md Normal file
View File

@ -0,0 +1,302 @@
# YX ASR - Sherpa ONNX 使用指南
本文档详细说明如何使用基于 sherpa_onnx 的 YX ASR 语音识别插件。
## 🎯 核心优势
### 与原生识别的对比
| 特性 | 原生识别 | sherpa_onnx |
|------|----------|-------------|
| **网络依赖** | 需要网络 | 完全离线 |
| **隐私保护** | 数据上传云端 | 本地处理 |
| **识别一致性** | 平台差异 | 跨平台一致 |
| **自定义能力** | 受限 | 高度可定制 |
| **包体积** | 无增加 | +40-60MB |
| **识别精度** | 高(云端) | 高(本地模型) |
## 📦 安装配置
### 1. 添加依赖
```yaml
dependencies:
yx_asr: ^1.0.0
```
### 2. 下载模型文件
从 [sherpa-onnx releases](https://github.com/k2-fsa/sherpa-onnx/releases/) 下载模型:
**推荐模型:**
- **中英双语**: `sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20`
- **纯中文**: `sherpa-onnx-streaming-zipformer-zh-2023-02-13`
- **纯英文**: `sherpa-onnx-streaming-zipformer-en-2023-02-21`
### 3. 模型文件结构
```
assets/models/
├── zh-cn/ # 中文模型
│ ├── encoder.onnx # 编码器
│ ├── decoder.onnx # 解码器
│ ├── joiner.onnx # 连接器
│ └── tokens.txt # 词汇表
├── en-us/ # 英文模型
│ ├── encoder.onnx
│ ├── decoder.onnx
│ ├── joiner.onnx
│ └── tokens.txt
└── bilingual/ # 双语模型
├── encoder.onnx
├── decoder.onnx
├── joiner.onnx
└── tokens.txt
```
## 🚀 使用方法
### 基本使用
```dart
import 'package:yx_asr/yx_asr.dart';
class SpeechRecognitionPage extends StatefulWidget {
@override
_SpeechRecognitionPageState createState() => _SpeechRecognitionPageState();
}
class _SpeechRecognitionPageState extends State<SpeechRecognitionPage> {
final YxAsr _asr = YxAsr();
String _result = '';
bool _isListening = false;
@override
void initState() {
super.initState();
_initializeASR();
}
Future<void> _initializeASR() async {
// 初始化中文模型
final success = await _asr.initializeWithModel('assets/models/zh-cn');
if (success) {
// 监听识别结果
_asr.onResult.listen((result) {
setState(() {
_result = result.recognizedWords;
});
});
// 监听错误
_asr.onError.listen((error) {
print('错误: ${error.errorMsg}');
});
// 监听状态
_asr.onListeningStatusChanged.listen((isListening) {
setState(() {
_isListening = isListening;
});
});
}
}
Future<void> _toggleRecording() async {
if (_isListening) {
await _asr.stopListening();
} else {
await _asr.startListening(partialResults: true);
}
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(title: Text('语音识别')),
body: Column(
children: [
Text('识别结果: $_result'),
ElevatedButton(
onPressed: _toggleRecording,
child: Text(_isListening ? '停止录音' : '开始录音'),
),
],
),
);
}
}
```
### 使用录音按钮组件
```dart
RecordingButton(
onResult: (result) {
print('识别结果: ${result.recognizedWords}');
print('是否最终结果: ${result.finalResult}');
print('置信度: ${result.confidence}');
},
onError: (error) {
print('错误: ${error.errorMsg}');
},
localeId: 'zh-CN', // 会自动选择对应模型
size: 80.0,
recordingColor: Colors.red,
idleColor: Colors.blue,
tooltip: '点击开始录音',
)
```
### 多语言支持
```dart
class MultiLanguageASR {
final YxAsr _asr = YxAsr();
String _currentLanguage = 'zh-cn';
Future<void> switchLanguage(String language) async {
String modelPath;
switch (language) {
case 'zh-cn':
modelPath = 'assets/models/zh-cn';
break;
case 'en-us':
modelPath = 'assets/models/en-us';
break;
case 'bilingual':
modelPath = 'assets/models/bilingual';
break;
default:
modelPath = 'assets/models/zh-cn';
}
final success = await _asr.initializeWithModel(modelPath);
if (success) {
_currentLanguage = language;
print('切换到 $language 模型成功');
}
}
}
```
## ⚙️ 高级配置
### 自定义采样率
```dart
await _asr.initializeWithModel(
'assets/models/zh-cn',
sampleRate: 16000, // 默认 16kHz
);
await _asr.startListening(
partialResults: true,
sampleRate: 16000,
);
```
### 错误处理
```dart
_asr.onError.listen((error) {
switch (error.errorType) {
case SpeechRecognitionErrorType.permissionDenied:
// 处理权限被拒绝
showDialog(context: context, builder: (context) =>
AlertDialog(title: Text('需要麦克风权限')));
break;
case SpeechRecognitionErrorType.service:
// 处理服务错误
print('服务错误: ${error.errorMsg}');
break;
case SpeechRecognitionErrorType.audio:
// 处理音频错误
print('音频错误: ${error.errorMsg}');
break;
default:
print('未知错误: ${error.errorMsg}');
}
});
```
## 🔧 性能优化
### 1. 模型选择
- **小型应用**: 使用单语言模型 (~40MB)
- **多语言应用**: 使用双语模型 (~60MB)
- **专业应用**: 训练自定义模型
### 2. 内存管理
```dart
@override
void dispose() {
_asr.dispose(); // 释放资源
super.dispose();
}
```
### 3. 批量处理
```dart
// 对于长时间录音,定期获取结果
Timer.periodic(Duration(seconds: 5), (timer) {
if (_asr.isListening) {
// 可以在这里保存中间结果
}
});
```
## 📱 平台特定配置
### Android
```xml
<!-- android/app/src/main/AndroidManifest.xml -->
<uses-permission android:name="android.permission.RECORD_AUDIO" />
```
### iOS
```xml
<!-- ios/Runner/Info.plist -->
<key>NSMicrophoneUsageDescription</key>
<string>此应用需要麦克风权限进行语音识别</string>
```
## 🐛 常见问题
### Q: 模型文件太大怎么办?
A: 可以考虑:
1. 使用模型压缩
2. 动态下载模型
3. 只包含必要的语言模型
### Q: 识别精度不够怎么办?
A: 可以尝试:
1. 使用更新的模型
2. 调整音频参数
3. 在安静环境中测试
4. 训练自定义模型
### Q: 如何实现实时显示?
A: 启用 `partialResults: true` 并监听结果流:
```dart
_asr.onResult.listen((result) {
if (result.finalResult) {
// 最终结果
finalText = result.recognizedWords;
} else {
// 实时结果
partialText = result.recognizedWords;
}
});
```
## 📚 参考资源
- [sherpa-onnx 官方文档](https://github.com/k2-fsa/sherpa-onnx)
- [模型下载地址](https://github.com/k2-fsa/sherpa-onnx/releases/)
- [Flutter 音频处理](https://pub.dev/packages/record)
## 🤝 技术支持
如有问题,请提交 Issue 或参考示例代码。

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

View File

@ -0,0 +1,2 @@
#Tue Aug 26 19:25:13 CST 2025
gradle.version=8.9

View File

View File

@ -0,0 +1,39 @@
package io.flutter.plugins;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import io.flutter.Log;
import io.flutter.embedding.engine.FlutterEngine;
/**
* Generated file. Do not edit.
* This file is generated by the Flutter tool based on the
* plugins that support the Android platform.
*/
@Keep
public final class GeneratedPluginRegistrant {
private static final String TAG = "GeneratedPluginRegistrant";
public static void registerWith(@NonNull FlutterEngine flutterEngine) {
try {
flutterEngine.getPlugins().add(new dev.flutter.plugins.integration_test.IntegrationTestPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin integration_test, dev.flutter.plugins.integration_test.IntegrationTestPlugin", e);
}
try {
flutterEngine.getPlugins().add(new io.flutter.plugins.pathprovider.PathProviderPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin path_provider_android, io.flutter.plugins.pathprovider.PathProviderPlugin", e);
}
try {
flutterEngine.getPlugins().add(new com.baseflow.permissionhandler.PermissionHandlerPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin permission_handler_android, com.baseflow.permissionhandler.PermissionHandlerPlugin", e);
}
try {
flutterEngine.getPlugins().add(new com.llfbandit.record.RecordPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin record_android, com.llfbandit.record.RecordPlugin", e);
}
}
}

63
android/build.gradle Normal file
View File

@ -0,0 +1,63 @@
// Gradle build script for the yx_asr Android plugin module.
group 'com.yuanxuan.yx_asr'
version '1.0.0'

buildscript {
    // Kotlin version shared by the Gradle plugin classpath and the stdlib dependency below.
    ext.kotlin_version = '1.7.10'
    repositories {
        google()
        mavenCentral()
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:7.3.0'
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'

android {
    compileSdkVersion 33

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    kotlinOptions {
        jvmTarget = '1.8'
    }

    // Plugin sources live under src/main/kotlin (Flutter plugin convention).
    sourceSets {
        main.java.srcDirs += 'src/main/kotlin'
        test.java.srcDirs += 'src/test/kotlin'
    }

    defaultConfig {
        // Matches the Android API 21+ support advertised in the README.
        minSdkVersion 21
    }

    dependencies {
        implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
    }

    testOptions {
        // Run unit tests on JUnit 5 with verbose per-event logging.
        unitTests.all {
            useJUnitPlatform()

            testLogging {
                events "passed", "skipped", "failed", "standardOut", "standardError"
                outputs.upToDateWhen {false}
                showStandardStreams = true
            }
        }
    }
}

2
android/local.properties Normal file
View File

@ -0,0 +1,2 @@
sdk.dir=/Users/max/development/android/sdk
flutter.sdk=/Users/max/fvm/versions/3.32.0

View File

@ -0,0 +1,12 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.yuanxuan.yx_asr">

    <!-- Required to capture microphone audio for speech recognition. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <!-- NOTE(review): the README states recognition is fully offline (sherpa_onnx)
         and needs no network permission - confirm whether INTERNET is still required. -->
    <uses-permission android:name="android.permission.INTERNET" />

    <!-- Makes the system speech RecognitionService visible under Android 11+
         package-visibility rules, needed by the SpeechRecognizer-based code path. -->
    <queries>
        <intent>
            <action android:name="android.speech.RecognitionService" />
        </intent>
    </queries>
</manifest>

View File

@ -0,0 +1,328 @@
package com.yuanxuan.yx_asr
import android.Manifest
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.os.Bundle
import android.speech.RecognitionListener
import android.speech.RecognizerIntent
import android.speech.SpeechRecognizer
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.embedding.engine.plugins.activity.ActivityAware
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
import io.flutter.plugin.common.EventChannel
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import io.flutter.plugin.common.PluginRegistry
/**
 * Android implementation of the yx_asr plugin.
 *
 * Exposes the platform [SpeechRecognizer] to Dart through a [MethodChannel]
 * ("yx_asr") and streams recognition results, errors and listening-status
 * changes over three [EventChannel]s ("yx_asr/results", "yx_asr/errors",
 * "yx_asr/status").
 *
 * NOTE(review): this class uses Android's built-in (typically online)
 * SpeechRecognizer API, while the package README describes a fully offline
 * sherpa_onnx backend - confirm which engine this plugin is meant to ship.
 */
class YxAsrPlugin: FlutterPlugin, MethodCallHandler, ActivityAware, PluginRegistry.RequestPermissionsResultListener {
    private lateinit var channel: MethodChannel
    private lateinit var resultEventChannel: EventChannel
    private lateinit var errorEventChannel: EventChannel
    private lateinit var statusEventChannel: EventChannel

    private var context: Context? = null
    private var activity: android.app.Activity? = null
    // Kept so the permission listener can be unregistered symmetrically in the
    // detach callbacks (the original leaked it, stacking duplicates on re-attach).
    private var activityBinding: ActivityPluginBinding? = null
    private var speechRecognizer: SpeechRecognizer? = null
    private var isListening = false

    // Event sinks are non-null only while the Dart side is subscribed.
    private var resultEventSink: EventChannel.EventSink? = null
    private var errorEventSink: EventChannel.EventSink? = null
    private var statusEventSink: EventChannel.EventSink? = null

    // Pending Flutter result for an in-flight runtime-permission request.
    private var permissionResult: Result? = null

    companion object {
        private const val PERMISSION_REQUEST_CODE = 1001
    }

    override fun onAttachedToEngine(flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
        context = flutterPluginBinding.applicationContext

        channel = MethodChannel(flutterPluginBinding.binaryMessenger, "yx_asr")
        channel.setMethodCallHandler(this)

        resultEventChannel = EventChannel(flutterPluginBinding.binaryMessenger, "yx_asr/results")
        resultEventChannel.setStreamHandler(object : EventChannel.StreamHandler {
            override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
                resultEventSink = events
            }

            override fun onCancel(arguments: Any?) {
                resultEventSink = null
            }
        })

        errorEventChannel = EventChannel(flutterPluginBinding.binaryMessenger, "yx_asr/errors")
        errorEventChannel.setStreamHandler(object : EventChannel.StreamHandler {
            override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
                errorEventSink = events
            }

            override fun onCancel(arguments: Any?) {
                errorEventSink = null
            }
        })

        statusEventChannel = EventChannel(flutterPluginBinding.binaryMessenger, "yx_asr/status")
        statusEventChannel.setStreamHandler(object : EventChannel.StreamHandler {
            override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
                statusEventSink = events
            }

            override fun onCancel(arguments: Any?) {
                statusEventSink = null
            }
        })
    }

    override fun onMethodCall(call: MethodCall, result: Result) {
        when (call.method) {
            "isAvailable" -> {
                // Guard against a null context instead of passing it straight
                // into a platform-typed parameter (would NPE on null).
                result.success(context?.let { SpeechRecognizer.isRecognitionAvailable(it) } ?: false)
            }
            "hasPermission" -> {
                result.success(hasPermission())
            }
            "requestPermission" -> {
                requestPermission(result)
            }
            "startListening" -> {
                val localeId = call.argument<String>("localeId") ?: "en-US"
                val partialResults = call.argument<Boolean>("partialResults") ?: true
                startListening(localeId, partialResults, result)
            }
            "stopListening" -> {
                stopListening(result)
            }
            "cancel" -> {
                cancel(result)
            }
            "isListening" -> {
                result.success(isListening)
            }
            else -> {
                result.notImplemented()
            }
        }
    }

    /** Whether RECORD_AUDIO is granted; false when no context is attached. */
    private fun hasPermission(): Boolean {
        return context?.let {
            ContextCompat.checkSelfPermission(it, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
        } ?: false
    }

    /**
     * Requests RECORD_AUDIO at runtime. Completes [result] immediately with
     * true when already granted, or false when no activity is attached;
     * otherwise the answer is delivered via [onRequestPermissionsResult].
     */
    private fun requestPermission(result: Result) {
        if (hasPermission()) {
            result.success(true)
            return
        }
        activity?.let {
            permissionResult = result
            ActivityCompat.requestPermissions(
                it,
                arrayOf(Manifest.permission.RECORD_AUDIO),
                PERMISSION_REQUEST_CODE
            )
        } ?: result.success(false)
    }

    override fun onRequestPermissionsResult(
        requestCode: Int,
        permissions: Array<out String>,
        grantResults: IntArray
    ): Boolean {
        if (requestCode == PERMISSION_REQUEST_CODE) {
            val granted = grantResults.isNotEmpty() && grantResults[0] == PackageManager.PERMISSION_GRANTED
            permissionResult?.success(granted)
            permissionResult = null
            return true
        }
        return false
    }

    /**
     * Starts a recognition session for [localeId]. No-op (still reported as
     * success) when a session is already running; reports PERMISSION_DENIED
     * when RECORD_AUDIO is missing.
     */
    private fun startListening(localeId: String, partialResults: Boolean, result: Result) {
        if (!hasPermission()) {
            sendError("permissionDenied", "Microphone permission not granted", null)
            result.error("PERMISSION_DENIED", "Microphone permission not granted", null)
            return
        }
        if (isListening) {
            result.success(null)
            return
        }
        context?.let { ctx ->
            try {
                speechRecognizer = SpeechRecognizer.createSpeechRecognizer(ctx)
                speechRecognizer?.setRecognitionListener(createRecognitionListener())

                val intent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply {
                    putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
                    putExtra(RecognizerIntent.EXTRA_LANGUAGE, localeId)
                    putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, partialResults)
                    putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5)
                }

                speechRecognizer?.startListening(intent)
                isListening = true
                statusEventSink?.success(true)
                result.success(null)
            } catch (e: Exception) {
                sendError("service", "Failed to start speech recognition: ${e.message}", null)
                result.error("START_FAILED", "Failed to start speech recognition", e.message)
            }
        } ?: result.error("NO_CONTEXT", "Context not available", null)
    }

    /**
     * Asks the recognizer to finish the current utterance. The final
     * transcript and the state reset arrive asynchronously through the
     * listener's onResults/onError callbacks, which call [cleanup].
     */
    private fun stopListening(result: Result) {
        speechRecognizer?.stopListening()
        result.success(null)
    }

    /** Aborts the current session without delivering a final result. */
    private fun cancel(result: Result) {
        speechRecognizer?.cancel()
        cleanup()
        result.success(null)
    }

    /** Releases the recognizer and notifies Dart that listening stopped. */
    private fun cleanup() {
        speechRecognizer?.destroy()
        speechRecognizer = null
        isListening = false
        statusEventSink?.success(false)
    }

    private fun createRecognitionListener(): RecognitionListener {
        return object : RecognitionListener {
            override fun onReadyForSpeech(params: Bundle?) {
                // Recognizer is ready for audio input.
            }

            override fun onBeginningOfSpeech() {
                // User started speaking.
            }

            override fun onRmsChanged(rmsdB: Float) {
                // Input audio level changed.
            }

            override fun onBufferReceived(buffer: ByteArray?) {
                // Raw audio buffer received.
            }

            override fun onEndOfSpeech() {
                // User stopped speaking.
            }

            override fun onError(error: Int) {
                // Map platform error codes onto the plugin's coarse categories.
                val errorType = when (error) {
                    SpeechRecognizer.ERROR_NETWORK_TIMEOUT, SpeechRecognizer.ERROR_NETWORK -> "network"
                    SpeechRecognizer.ERROR_AUDIO -> "audio"
                    SpeechRecognizer.ERROR_SERVER -> "service"
                    SpeechRecognizer.ERROR_CLIENT -> "service"
                    SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "noSpeech"
                    SpeechRecognizer.ERROR_NO_MATCH -> "noSpeech"
                    SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "service"
                    SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "permissionDenied"
                    else -> "unknown"
                }
                val errorMsg = when (error) {
                    SpeechRecognizer.ERROR_NETWORK_TIMEOUT -> "Network timeout"
                    SpeechRecognizer.ERROR_NETWORK -> "Network error"
                    SpeechRecognizer.ERROR_AUDIO -> "Audio recording error"
                    SpeechRecognizer.ERROR_SERVER -> "Server error"
                    SpeechRecognizer.ERROR_CLIENT -> "Client error"
                    SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "No speech input"
                    SpeechRecognizer.ERROR_NO_MATCH -> "No recognition result matched"
                    SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "Recognition service busy"
                    SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "Insufficient permissions"
                    else -> "Unknown error"
                }
                sendError(errorType, errorMsg, error.toString())
                cleanup()
            }

            override fun onResults(results: Bundle?) {
                results?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION)?.let { matches ->
                    if (matches.isNotEmpty()) {
                        val confidence = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES)
                        sendResult(
                            recognizedWords = matches[0],
                            finalResult = true,
                            // firstOrNull(): CONFIDENCE_SCORES may be absent or
                            // empty even when matches exist; get(0) would crash.
                            confidence = confidence?.firstOrNull()?.toDouble() ?: 0.0,
                            alternatives = matches.drop(1)
                        )
                    }
                }
                cleanup()
            }

            override fun onPartialResults(partialResults: Bundle?) {
                partialResults?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION)?.let { matches ->
                    if (matches.isNotEmpty()) {
                        sendResult(
                            recognizedWords = matches[0],
                            finalResult = false,
                            // Platform supplies no confidence for partial results.
                            confidence = 0.0,
                            alternatives = matches.drop(1)
                        )
                    }
                }
            }

            override fun onEvent(eventType: Int, params: Bundle?) {
                // Reserved for future recognizer events.
            }
        }
    }

    /** Emits one recognition-result map on the results event channel. */
    private fun sendResult(recognizedWords: String, finalResult: Boolean, confidence: Double, alternatives: List<String>) {
        val result = mapOf(
            "recognizedWords" to recognizedWords,
            "finalResult" to finalResult,
            "confidence" to confidence,
            "alternatives" to alternatives
        )
        resultEventSink?.success(result)
    }

    /** Emits one error map on the errors event channel. */
    private fun sendError(errorType: String, errorMsg: String, errorCode: String?) {
        val error = mapOf(
            "errorType" to errorType,
            "errorMsg" to errorMsg,
            "errorCode" to errorCode
        )
        errorEventSink?.success(error)
    }

    override fun onDetachedFromEngine(binding: FlutterPlugin.FlutterPluginBinding) {
        channel.setMethodCallHandler(null)
        cleanup()
    }

    override fun onAttachedToActivity(binding: ActivityPluginBinding) {
        activityBinding = binding
        activity = binding.activity
        binding.addRequestPermissionsResultListener(this)
    }

    override fun onDetachedFromActivityForConfigChanges() {
        detachFromActivity()
    }

    override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) {
        activityBinding = binding
        activity = binding.activity
        binding.addRequestPermissionsResultListener(this)
    }

    override fun onDetachedFromActivity() {
        detachFromActivity()
    }

    /**
     * Symmetric teardown for both detach callbacks: unregisters the permission
     * listener (preventing duplicate registrations across config changes) and
     * completes any permission request that can no longer be answered with
     * false rather than leaving its Dart future dangling forever.
     */
    private fun detachFromActivity() {
        activityBinding?.removeRequestPermissionsResultListener(this)
        activityBinding = null
        activity = null
        permissionResult?.success(false)
        permissionResult = null
    }
}

View File

@ -0,0 +1 @@
{"format-version":[1,0,0],"native-assets":{}}

View File

@ -0,0 +1 @@
assets/models/README.md  assetassets/models/README.md.assets/models/decoder-epoch-99-avg-1.int8.onnx  asset.assets/models/decoder-epoch-99-avg-1.int8.onnx.assets/models/encoder-epoch-99-avg-1.int8.onnx  asset.assets/models/encoder-epoch-99-avg-1.int8.onnx-assets/models/joiner-epoch-99-avg-1.int8.onnx  asset-assets/models/joiner-epoch-99-avg-1.int8.onnxassets/models/tokens.txt  assetassets/models/tokens.txt7packages/record_web/assets/js/record.fixwebmduration.js  asset7packages/record_web/assets/js/record.fixwebmduration.js/packages/record_web/assets/js/record.worklet.js  asset/packages/record_web/assets/js/record.worklet.js

View File

@ -0,0 +1 @@
{"assets/models/README.md":["assets/models/README.md"],"assets/models/decoder-epoch-99-avg-1.int8.onnx":["assets/models/decoder-epoch-99-avg-1.int8.onnx"],"assets/models/encoder-epoch-99-avg-1.int8.onnx":["assets/models/encoder-epoch-99-avg-1.int8.onnx"],"assets/models/joiner-epoch-99-avg-1.int8.onnx":["assets/models/joiner-epoch-99-avg-1.int8.onnx"],"assets/models/tokens.txt":["assets/models/tokens.txt"],"packages/record_web/assets/js/record.fixwebmduration.js":["packages/record_web/assets/js/record.fixwebmduration.js"],"packages/record_web/assets/js/record.worklet.js":["packages/record_web/assets/js/record.worklet.js"]}

View File

@ -0,0 +1 @@
[]

Binary file not shown.

View File

@ -0,0 +1 @@
{"format-version":[1,0,0],"native-assets":{}}

View File

@ -0,0 +1,56 @@
# 语音识别模型文件
这个目录包含了 sherpa_onnx 语音识别所需的模型文件。
## 模型结构
```
assets/models/
├── zh-cn/ # 中文模型
│ ├── encoder.onnx # 编码器模型
│ ├── decoder.onnx # 解码器模型
│ ├── joiner.onnx # 连接器模型
│ └── tokens.txt # 词汇表
├── en-us/ # 英文模型
│ ├── encoder.onnx
│ ├── decoder.onnx
│ ├── joiner.onnx
│ └── tokens.txt
└── multilingual/ # 多语言模型
├── encoder.onnx
├── decoder.onnx
├── joiner.onnx
└── tokens.txt
```
## 模型下载
由于模型文件较大,请从以下地址下载对应的模型文件:
### 中文模型 (推荐)
- 模型名称: sherpa-onnx-streaming-zipformer-bilingual-zh-en-2023-02-20
- 下载地址: https://github.com/k2-fsa/sherpa-onnx/releases/
- 大小: ~40MB
### 英文模型
- 模型名称: sherpa-onnx-streaming-zipformer-en-2023-02-21
- 下载地址: https://github.com/k2-fsa/sherpa-onnx/releases/
- 大小: ~40MB
### 多语言模型
- 模型名称: sherpa-onnx-streaming-zipformer-multilingual-2023-02-20
- 下载地址: https://github.com/k2-fsa/sherpa-onnx/releases/
- 大小: ~60MB
## 使用说明
1. 下载对应的模型文件
2. 解压到对应的语言目录
3. 确保文件名和路径正确
4. 重新构建应用
## 注意事项
- 模型文件会增加应用包大小
- 建议根据需要只包含必要的语言模型
- 模型文件支持热更新,可以在运行时下载

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,507 @@
(function (name, definition) {
window.jsFixWebmDuration = definition();
})('fix-webm-duration', function () {
/*
* This is the list of possible WEBM file sections by their IDs.
* Possible types: Container, Binary, Uint, Int, String, Float, Date
*/
var sections = {
0xa45dfa3: { name: 'EBML', type: 'Container' },
0x286: { name: 'EBMLVersion', type: 'Uint' },
0x2f7: { name: 'EBMLReadVersion', type: 'Uint' },
0x2f2: { name: 'EBMLMaxIDLength', type: 'Uint' },
0x2f3: { name: 'EBMLMaxSizeLength', type: 'Uint' },
0x282: { name: 'DocType', type: 'String' },
0x287: { name: 'DocTypeVersion', type: 'Uint' },
0x285: { name: 'DocTypeReadVersion', type: 'Uint' },
0x6c: { name: 'Void', type: 'Binary' },
0x3f: { name: 'CRC-32', type: 'Binary' },
0xb538667: { name: 'SignatureSlot', type: 'Container' },
0x3e8a: { name: 'SignatureAlgo', type: 'Uint' },
0x3e9a: { name: 'SignatureHash', type: 'Uint' },
0x3ea5: { name: 'SignaturePublicKey', type: 'Binary' },
0x3eb5: { name: 'Signature', type: 'Binary' },
0x3e5b: { name: 'SignatureElements', type: 'Container' },
0x3e7b: { name: 'SignatureElementList', type: 'Container' },
0x2532: { name: 'SignedElement', type: 'Binary' },
0x8538067: { name: 'Segment', type: 'Container' },
0x14d9b74: { name: 'SeekHead', type: 'Container' },
0xdbb: { name: 'Seek', type: 'Container' },
0x13ab: { name: 'SeekID', type: 'Binary' },
0x13ac: { name: 'SeekPosition', type: 'Uint' },
0x549a966: { name: 'Info', type: 'Container' },
0x33a4: { name: 'SegmentUID', type: 'Binary' },
0x3384: { name: 'SegmentFilename', type: 'String' },
0x1cb923: { name: 'PrevUID', type: 'Binary' },
0x1c83ab: { name: 'PrevFilename', type: 'String' },
0x1eb923: { name: 'NextUID', type: 'Binary' },
0x1e83bb: { name: 'NextFilename', type: 'String' },
0x444: { name: 'SegmentFamily', type: 'Binary' },
0x2924: { name: 'ChapterTranslate', type: 'Container' },
0x29fc: { name: 'ChapterTranslateEditionUID', type: 'Uint' },
0x29bf: { name: 'ChapterTranslateCodec', type: 'Uint' },
0x29a5: { name: 'ChapterTranslateID', type: 'Binary' },
0xad7b1: { name: 'TimecodeScale', type: 'Uint' },
0x489: { name: 'Duration', type: 'Float' },
0x461: { name: 'DateUTC', type: 'Date' },
0x3ba9: { name: 'Title', type: 'String' },
0xd80: { name: 'MuxingApp', type: 'String' },
0x1741: { name: 'WritingApp', type: 'String' },
// 0xf43b675: { name: 'Cluster', type: 'Container' },
0x67: { name: 'Timecode', type: 'Uint' },
0x1854: { name: 'SilentTracks', type: 'Container' },
0x18d7: { name: 'SilentTrackNumber', type: 'Uint' },
0x27: { name: 'Position', type: 'Uint' },
0x2b: { name: 'PrevSize', type: 'Uint' },
0x23: { name: 'SimpleBlock', type: 'Binary' },
0x20: { name: 'BlockGroup', type: 'Container' },
0x21: { name: 'Block', type: 'Binary' },
0x22: { name: 'BlockVirtual', type: 'Binary' },
0x35a1: { name: 'BlockAdditions', type: 'Container' },
0x26: { name: 'BlockMore', type: 'Container' },
0x6e: { name: 'BlockAddID', type: 'Uint' },
0x25: { name: 'BlockAdditional', type: 'Binary' },
0x1b: { name: 'BlockDuration', type: 'Uint' },
0x7a: { name: 'ReferencePriority', type: 'Uint' },
0x7b: { name: 'ReferenceBlock', type: 'Int' },
0x7d: { name: 'ReferenceVirtual', type: 'Int' },
0x24: { name: 'CodecState', type: 'Binary' },
0x35a2: { name: 'DiscardPadding', type: 'Int' },
0xe: { name: 'Slices', type: 'Container' },
0x68: { name: 'TimeSlice', type: 'Container' },
0x4c: { name: 'LaceNumber', type: 'Uint' },
0x4d: { name: 'FrameNumber', type: 'Uint' },
0x4b: { name: 'BlockAdditionID', type: 'Uint' },
0x4e: { name: 'Delay', type: 'Uint' },
0x4f: { name: 'SliceDuration', type: 'Uint' },
0x48: { name: 'ReferenceFrame', type: 'Container' },
0x49: { name: 'ReferenceOffset', type: 'Uint' },
0x4a: { name: 'ReferenceTimeCode', type: 'Uint' },
0x2f: { name: 'EncryptedBlock', type: 'Binary' },
0x654ae6b: { name: 'Tracks', type: 'Container' },
0x2e: { name: 'TrackEntry', type: 'Container' },
0x57: { name: 'TrackNumber', type: 'Uint' },
0x33c5: { name: 'TrackUID', type: 'Uint' },
0x3: { name: 'TrackType', type: 'Uint' },
0x39: { name: 'FlagEnabled', type: 'Uint' },
0x8: { name: 'FlagDefault', type: 'Uint' },
0x15aa: { name: 'FlagForced', type: 'Uint' },
0x1c: { name: 'FlagLacing', type: 'Uint' },
0x2de7: { name: 'MinCache', type: 'Uint' },
0x2df8: { name: 'MaxCache', type: 'Uint' },
0x3e383: { name: 'DefaultDuration', type: 'Uint' },
0x34e7a: { name: 'DefaultDecodedFieldDuration', type: 'Uint' },
0x3314f: { name: 'TrackTimecodeScale', type: 'Float' },
0x137f: { name: 'TrackOffset', type: 'Int' },
0x15ee: { name: 'MaxBlockAdditionID', type: 'Uint' },
0x136e: { name: 'Name', type: 'String' },
0x2b59c: { name: 'Language', type: 'String' },
0x6: { name: 'CodecID', type: 'String' },
0x23a2: { name: 'CodecPrivate', type: 'Binary' },
0x58688: { name: 'CodecName', type: 'String' },
0x3446: { name: 'AttachmentLink', type: 'Uint' },
0x1a9697: { name: 'CodecSettings', type: 'String' },
0x1b4040: { name: 'CodecInfoURL', type: 'String' },
0x6b240: { name: 'CodecDownloadURL', type: 'String' },
0x2a: { name: 'CodecDecodeAll', type: 'Uint' },
0x2fab: { name: 'TrackOverlay', type: 'Uint' },
0x16aa: { name: 'CodecDelay', type: 'Uint' },
0x16bb: { name: 'SeekPreRoll', type: 'Uint' },
0x2624: { name: 'TrackTranslate', type: 'Container' },
0x26fc: { name: 'TrackTranslateEditionUID', type: 'Uint' },
0x26bf: { name: 'TrackTranslateCodec', type: 'Uint' },
0x26a5: { name: 'TrackTranslateTrackID', type: 'Binary' },
0x60: { name: 'Video', type: 'Container' },
0x1a: { name: 'FlagInterlaced', type: 'Uint' },
0x13b8: { name: 'StereoMode', type: 'Uint' },
0x13c0: { name: 'AlphaMode', type: 'Uint' },
0x13b9: { name: 'OldStereoMode', type: 'Uint' },
0x30: { name: 'PixelWidth', type: 'Uint' },
0x3a: { name: 'PixelHeight', type: 'Uint' },
0x14aa: { name: 'PixelCropBottom', type: 'Uint' },
0x14bb: { name: 'PixelCropTop', type: 'Uint' },
0x14cc: { name: 'PixelCropLeft', type: 'Uint' },
0x14dd: { name: 'PixelCropRight', type: 'Uint' },
0x14b0: { name: 'DisplayWidth', type: 'Uint' },
0x14ba: { name: 'DisplayHeight', type: 'Uint' },
0x14b2: { name: 'DisplayUnit', type: 'Uint' },
0x14b3: { name: 'AspectRatioType', type: 'Uint' },
0xeb524: { name: 'ColourSpace', type: 'Binary' },
0xfb523: { name: 'GammaValue', type: 'Float' },
0x383e3: { name: 'FrameRate', type: 'Float' },
0x61: { name: 'Audio', type: 'Container' },
0x35: { name: 'SamplingFrequency', type: 'Float' },
0x38b5: { name: 'OutputSamplingFrequency', type: 'Float' },
0x1f: { name: 'Channels', type: 'Uint' },
0x3d7b: { name: 'ChannelPositions', type: 'Binary' },
0x2264: { name: 'BitDepth', type: 'Uint' },
0x62: { name: 'TrackOperation', type: 'Container' },
0x63: { name: 'TrackCombinePlanes', type: 'Container' },
0x64: { name: 'TrackPlane', type: 'Container' },
0x65: { name: 'TrackPlaneUID', type: 'Uint' },
0x66: { name: 'TrackPlaneType', type: 'Uint' },
0x69: { name: 'TrackJoinBlocks', type: 'Container' },
0x6d: { name: 'TrackJoinUID', type: 'Uint' },
0x40: { name: 'TrickTrackUID', type: 'Uint' },
0x41: { name: 'TrickTrackSegmentUID', type: 'Binary' },
0x46: { name: 'TrickTrackFlag', type: 'Uint' },
0x47: { name: 'TrickMasterTrackUID', type: 'Uint' },
0x44: { name: 'TrickMasterTrackSegmentUID', type: 'Binary' },
0x2d80: { name: 'ContentEncodings', type: 'Container' },
0x2240: { name: 'ContentEncoding', type: 'Container' },
0x1031: { name: 'ContentEncodingOrder', type: 'Uint' },
0x1032: { name: 'ContentEncodingScope', type: 'Uint' },
0x1033: { name: 'ContentEncodingType', type: 'Uint' },
0x1034: { name: 'ContentCompression', type: 'Container' },
0x254: { name: 'ContentCompAlgo', type: 'Uint' },
0x255: { name: 'ContentCompSettings', type: 'Binary' },
0x1035: { name: 'ContentEncryption', type: 'Container' },
0x7e1: { name: 'ContentEncAlgo', type: 'Uint' },
0x7e2: { name: 'ContentEncKeyID', type: 'Binary' },
0x7e3: { name: 'ContentSignature', type: 'Binary' },
0x7e4: { name: 'ContentSigKeyID', type: 'Binary' },
0x7e5: { name: 'ContentSigAlgo', type: 'Uint' },
0x7e6: { name: 'ContentSigHashAlgo', type: 'Uint' },
0xc53bb6b: { name: 'Cues', type: 'Container' },
0x3b: { name: 'CuePoint', type: 'Container' },
0x33: { name: 'CueTime', type: 'Uint' },
0x37: { name: 'CueTrackPositions', type: 'Container' },
0x77: { name: 'CueTrack', type: 'Uint' },
0x71: { name: 'CueClusterPosition', type: 'Uint' },
0x70: { name: 'CueRelativePosition', type: 'Uint' },
0x32: { name: 'CueDuration', type: 'Uint' },
0x1378: { name: 'CueBlockNumber', type: 'Uint' },
0x6a: { name: 'CueCodecState', type: 'Uint' },
0x5b: { name: 'CueReference', type: 'Container' },
0x16: { name: 'CueRefTime', type: 'Uint' },
0x17: { name: 'CueRefCluster', type: 'Uint' },
0x135f: { name: 'CueRefNumber', type: 'Uint' },
0x6b: { name: 'CueRefCodecState', type: 'Uint' },
0x941a469: { name: 'Attachments', type: 'Container' },
0x21a7: { name: 'AttachedFile', type: 'Container' },
0x67e: { name: 'FileDescription', type: 'String' },
0x66e: { name: 'FileName', type: 'String' },
0x660: { name: 'FileMimeType', type: 'String' },
0x65c: { name: 'FileData', type: 'Binary' },
0x6ae: { name: 'FileUID', type: 'Uint' },
0x675: { name: 'FileReferral', type: 'Binary' },
0x661: { name: 'FileUsedStartTime', type: 'Uint' },
0x662: { name: 'FileUsedEndTime', type: 'Uint' },
0x43a770: { name: 'Chapters', type: 'Container' },
0x5b9: { name: 'EditionEntry', type: 'Container' },
0x5bc: { name: 'EditionUID', type: 'Uint' },
0x5bd: { name: 'EditionFlagHidden', type: 'Uint' },
0x5db: { name: 'EditionFlagDefault', type: 'Uint' },
0x5dd: { name: 'EditionFlagOrdered', type: 'Uint' },
0x36: { name: 'ChapterAtom', type: 'Container' },
0x33c4: { name: 'ChapterUID', type: 'Uint' },
0x1654: { name: 'ChapterStringUID', type: 'String' },
0x11: { name: 'ChapterTimeStart', type: 'Uint' },
0x12: { name: 'ChapterTimeEnd', type: 'Uint' },
0x18: { name: 'ChapterFlagHidden', type: 'Uint' },
0x598: { name: 'ChapterFlagEnabled', type: 'Uint' },
0x2e67: { name: 'ChapterSegmentUID', type: 'Binary' },
0x2ebc: { name: 'ChapterSegmentEditionUID', type: 'Uint' },
0x23c3: { name: 'ChapterPhysicalEquiv', type: 'Uint' },
0xf: { name: 'ChapterTrack', type: 'Container' },
0x9: { name: 'ChapterTrackNumber', type: 'Uint' },
0x0: { name: 'ChapterDisplay', type: 'Container' },
0x5: { name: 'ChapString', type: 'String' },
0x37c: { name: 'ChapLanguage', type: 'String' },
0x37e: { name: 'ChapCountry', type: 'String' },
0x2944: { name: 'ChapProcess', type: 'Container' },
0x2955: { name: 'ChapProcessCodecID', type: 'Uint' },
0x50d: { name: 'ChapProcessPrivate', type: 'Binary' },
0x2911: { name: 'ChapProcessCommand', type: 'Container' },
0x2922: { name: 'ChapProcessTime', type: 'Uint' },
0x2933: { name: 'ChapProcessData', type: 'Binary' },
0x254c367: { name: 'Tags', type: 'Container' },
0x3373: { name: 'Tag', type: 'Container' },
0x23c0: { name: 'Targets', type: 'Container' },
0x28ca: { name: 'TargetTypeValue', type: 'Uint' },
0x23ca: { name: 'TargetType', type: 'String' },
0x23c5: { name: 'TagTrackUID', type: 'Uint' },
0x23c9: { name: 'TagEditionUID', type: 'Uint' },
0x23c4: { name: 'TagChapterUID', type: 'Uint' },
0x23c6: { name: 'TagAttachmentUID', type: 'Uint' },
0x27c8: { name: 'SimpleTag', type: 'Container' },
0x5a3: { name: 'TagName', type: 'String' },
0x47a: { name: 'TagLanguage', type: 'String' },
0x484: { name: 'TagDefault', type: 'Uint' },
0x487: { name: 'TagString', type: 'String' },
0x485: { name: 'TagBinary', type: 'Binary' }
};
/**
 * Classic prototypal-inheritance helper: makes newClass extend baseClass
 * while keeping newClass as its own constructor.
 */
function doInherit(newClass, baseClass) {
    var proto = Object.create(baseClass.prototype);
    proto.constructor = newClass;
    newClass.prototype = proto;
}
/**
 * Root type for all WEBM section wrappers. Each section keeps a raw byte
 * `source` and a parsed `data` representation, kept in sync through the
 * update* hooks that subclasses override.
 */
function WebmBase(name, type) {
    this.name = name || 'Unknown';
    this.type = type || 'Unknown';
}
// Subclasses override these two hooks to convert between bytes and data.
WebmBase.prototype.updateBySource = function () { };
WebmBase.prototype.updateByData = function () { };
// Assigns raw bytes and re-derives the parsed representation.
WebmBase.prototype.setSource = function (source) {
    this.source = source;
    this.updateBySource();
};
// Assigns parsed data and re-derives the raw bytes.
WebmBase.prototype.setData = function (data) {
    this.data = data;
    this.updateByData();
};
/** Unsigned-integer section. */
function WebmUint(name, type) {
    WebmBase.call(this, name, type || 'Uint');
}
doInherit(WebmUint, WebmBase);
/** Left-pads a hex string to an even number of digits. */
function padHex(hex) {
    return (hex.length % 2 === 1) ? ('0' + hex) : hex;
}
// The parsed representation is a hex STRING mirroring the big-endian bytes,
// not a JS number — this preserves values wider than 32 bits of precision
// handling.
WebmUint.prototype.updateBySource = function () {
    var hex = '';
    for (var i = 0; i < this.source.length; i++) {
        hex += padHex(this.source[i].toString(16));
    }
    this.data = hex;
};
WebmUint.prototype.updateByData = function () {
    var byteCount = this.data.length / 2;
    var bytes = new Uint8Array(byteCount);
    for (var i = 0; i < byteCount; i++) {
        bytes[i] = parseInt(this.data.substr(i * 2, 2), 16);
    }
    this.source = bytes;
};
/** Numeric value of the stored hex string. */
WebmUint.prototype.getValue = function () {
    return parseInt(this.data, 16);
};
/** Stores a numeric value, padding to whole bytes. */
WebmUint.prototype.setValue = function (value) {
    this.setData(padHex(value.toString(16)));
};
/** Floating-point section (4-byte = single precision, 8-byte = double). */
function WebmFloat(name, type) {
    WebmBase.call(this, name, type || 'Float');
}
doInherit(WebmFloat, WebmBase);
WebmFloat.prototype.getFloatArrayType = function () {
    return (this.source && this.source.length === 4) ? Float32Array : Float64Array;
};
// WEBM stores floats big-endian; reverse() flips the byte order for the
// little-endian typed-array view. Note: reverse() mutates `source` in place.
WebmFloat.prototype.updateBySource = function () {
    var bytes = this.source.reverse();
    var ArrayType = this.getFloatArrayType();
    this.data = new ArrayType(bytes.buffer)[0];
};
WebmFloat.prototype.updateByData = function () {
    var ArrayType = this.getFloatArrayType();
    var floats = new ArrayType([this.data]);
    this.source = new Uint8Array(floats.buffer).reverse();
};
WebmFloat.prototype.getValue = function () {
    return this.data;
};
WebmFloat.prototype.setValue = function (value) {
    this.setData(value);
};
// A section that itself contains child sections; after parsing, `data` is a
// list of { id, idHex, data: <section object> } entries.
function WebmContainer(name, type) {
WebmBase.call(this, name, type || 'Container');
}
doInherit(WebmContainer, WebmBase);
// Reads one byte from `source`, advancing the parse cursor `offset`.
WebmContainer.prototype.readByte = function () {
return this.source[this.offset++];
};
// Reads an EBML variable-length integer (vint): the position of the first
// set bit in the leading byte gives the number of extra bytes, and that
// marker bit is stripped from the value before the remaining bytes are
// appended big-endian.
WebmContainer.prototype.readUint = function () {
var firstByte = this.readByte();
var bytes = 8 - firstByte.toString(2).length;
var value = firstByte - (1 << (7 - bytes));
for (var i = 0; i < bytes; i++) {
// don't use bit operators to support x86
value *= 256;
value += this.readByte();
}
return value;
};
// Parses `source` into child sections: repeatedly read an id vint and a
// length vint, slice out the payload, and wrap it in the section class
// declared for that id in the `sections` table (unknown ids fall back to
// a plain WebmBase). Truncated payloads are clamped to the buffer end.
WebmContainer.prototype.updateBySource = function () {
this.data = [];
// `end` is var-hoisted, so the loop-update `this.offset = end` sees the
// value assigned inside the body on the previous iteration.
for (this.offset = 0; this.offset < this.source.length; this.offset = end) {
var id = this.readUint();
var len = this.readUint();
var end = Math.min(this.offset + len, this.source.length);
var data = this.source.slice(this.offset, end);
var info = sections[id] || { name: 'Unknown', type: 'Unknown' };
var ctr = WebmBase;
switch (info.type) {
case 'Container':
ctr = WebmContainer;
break;
case 'Uint':
ctr = WebmUint;
break;
case 'Float':
ctr = WebmFloat;
break;
}
var section = new ctr(info.name, info.type);
section.setSource(data);
this.data.push({
id: id,
idHex: id.toString(16),
data: section
});
}
};
// Writes `x` as an EBML vint at the current offset. When `draft` is truthy,
// only the cursor is advanced — used as a sizing pass before the output
// buffer is allocated.
WebmContainer.prototype.writeUint = function (x, draft) {
// Determine the byte count; `flag` becomes the length-marker bit for it.
for (var bytes = 1, flag = 0x80; x >= flag && bytes < 8; bytes++, flag *= 0x80) { }
if (!draft) {
// `flag + x` merges the marker bit with the value; emit big-endian.
var value = flag + x;
for (var i = bytes - 1; i >= 0; i--) {
// don't use bit operators to support x86
var c = value % 256;
this.source[this.offset + i] = c;
value = (value - c) / 256;
}
}
this.offset += bytes;
};
// Serializes every child section (id vint, length vint, payload) into
// `source`, returning the total byte length. When `draft` is truthy nothing
// is written — only the length is computed.
WebmContainer.prototype.writeSections = function (draft) {
this.offset = 0;
for (var i = 0; i < this.data.length; i++) {
var section = this.data[i],
content = section.data.source,
contentLength = content.length;
this.writeUint(section.id, draft);
this.writeUint(contentLength, draft);
if (!draft) {
this.source.set(content, this.offset);
}
this.offset += contentLength;
}
return this.offset;
};
// Re-serializes the child sections into a freshly sized `source` buffer
// using a two-pass write (size first, then bytes).
WebmContainer.prototype.updateByData = function () {
// run without accessing this.source to determine total length - need to know it to create Uint8Array
var length = this.writeSections('draft');
this.source = new Uint8Array(length);
// now really write data
this.writeSections();
};
/** Returns the first child section with the given EBML id, or null. */
WebmContainer.prototype.getSectionById = function (id) {
    for (var i = 0, n = this.data.length; i < n; i++) {
        var entry = this.data[i];
        if (entry.id === id) {
            return entry.data;
        }
    }
    return null;
};
// Top-level file wrapper: parsing begins as soon as the raw bytes are
// supplied to the constructor.
function WebmFile(source) {
WebmContainer.call(this, 'File', 'File');
this.setSource(source);
}
doInherit(WebmFile, WebmContainer);
/**
 * Ensures Segment > Info contains a Duration section set to `duration`
 * (interpreted in ms once TimecodeScale is forced to 1e6 ns) and
 * re-serializes the file. Returns true when the file was modified, false
 * when a positive Duration already exists or a required section is missing.
 * `options.logger` may be a custom function, or a falsy value to silence
 * logging (undefined means "log to console").
 */
WebmFile.prototype.fixDuration = function (duration, options) {
var logger = options && options.logger;
if (logger === undefined) {
logger = function (message) {
console.log(message);
};
} else if (!logger) {
logger = function () { };
}
var segmentSection = this.getSectionById(0x8538067);
if (!segmentSection) {
logger('[fix-webm-duration] Segment section is missing');
return false;
}
var infoSection = segmentSection.getSectionById(0x549a966);
if (!infoSection) {
logger('[fix-webm-duration] Info section is missing');
return false;
}
var timeScaleSection = infoSection.getSectionById(0xad7b1);
if (!timeScaleSection) {
logger('[fix-webm-duration] TimecodeScale section is missing');
return false;
}
var durationSection = infoSection.getSectionById(0x489);
if (durationSection) {
if (durationSection.getValue() <= 0) {
logger('[fix-webm-duration] Duration section is present, but the value is empty. Applying ' + duration.toLocaleString() + ' ms.');
durationSection.setValue(duration);
} else {
// A valid duration is already recorded — nothing to fix.
logger('[fix-webm-duration] Duration section is present');
return false;
}
} else {
logger('[fix-webm-duration] Duration section is missing. Applying ' + duration.toLocaleString() + ' ms.');
// append Duration section
durationSection = new WebmFloat('Duration', 'Float');
durationSection.setValue(duration);
infoSection.data.push({
id: 0x489,
data: durationSection
});
}
// set default time scale to 1 millisecond (1000000 nanoseconds)
timeScaleSection.setValue(1000000);
// Propagate the edit bottom-up so every ancestor's byte buffer is rebuilt.
infoSection.updateByData();
segmentSection.updateByData();
this.updateByData();
return true;
};
// Wraps the (possibly rewritten) file bytes in a Blob; defaults to audio/webm.
WebmFile.prototype.toBlob = function (mimeType) {
return new Blob([this.source.buffer], { type: mimeType || 'audio/webm' });
};
/**
 * Public entry point: reads `blob`, patches in `duration` (ms) when the WEBM
 * header lacks one, and delivers the (possibly unchanged) blob. Works in
 * callback style when `callback` is a function; otherwise returns a Promise.
 * Any parse/IO failure falls back to delivering the original blob untouched.
 */
function fixWebmDuration(blob, duration, callback, options) {
// The callback may be omitted - then the third argument is options
if (typeof callback === "object") {
options = callback;
callback = undefined;
}
if (!callback) {
return new Promise(function (resolve) {
fixWebmDuration(blob, duration, resolve, options);
});
}
try {
var reader = new FileReader();
reader.onloadend = function () {
try {
var file = new WebmFile(new Uint8Array(reader.result));
if (file.fixDuration(duration, options)) {
blob = file.toBlob(blob.type);
}
} catch (ex) {
// best-effort: on any parse error, deliver the original blob below
}
callback(blob);
};
reader.readAsArrayBuffer(blob);
} catch (ex) {
callback(blob);
}
}
// Support AMD import default
fixWebmDuration.default = fixWebmDuration;
return fixWebmDuration;
});

View File

@ -0,0 +1,407 @@
/**
 * AudioWorklet processor that accumulates raw Float32 PCM frames, interleaves
 * the channels, resamples to the requested output rate, and posts 16-bit PCM
 * chunks to the main thread through the worklet message port.
 */
class RecorderProcessor extends AudioWorkletProcessor {
    static get parameterDescriptors() {
        return [
            {
                name: 'numChannels',
                defaultValue: 1,
                minValue: 1,
                maxValue: 16
            },
            {
                name: 'sampleRate',
                defaultValue: 48000,
                minValue: 8000,
                maxValue: 96000
            },
            {
                name: 'streamBufferSize',
                defaultValue: 2048,
                minValue: 256,
                maxValue: 8192
            }
        ];
    }
    // Buffer size compromise between size and process call frequency
    _bufferSize = 2048
    // The current buffer fill level (frames accumulated per channel)
    _bytesWritten = 0
    // One list of Float32Array chunks per channel
    _buffers = []
    // Resampler (passthrough, down or up)
    _resampler = null
    // Config
    _numChannels = 1
    _sampleRate = 48000
    constructor(options) {
        super(options)
        this._numChannels = options.parameterData.numChannels
        this._sampleRate = options.parameterData.sampleRate
        this._bufferSize = options.parameterData.streamBufferSize
        // Resampler(current context sample rate, desired sample rate, num channels, buffer size)
        // num channels is always 1 since we resample after interleaving channels.
        // `sampleRate` here is the AudioWorkletGlobalScope's context rate.
        this._resampler = new Resampler(sampleRate, this._sampleRate, 1, this._bufferSize * this._numChannels)
        this.initBuffers()
    }
    // Resets the fill counter and allocates an empty chunk list per channel.
    initBuffers() {
        this._bytesWritten = 0
        this._buffers = []
        for (let channel = 0; channel < this._numChannels; channel++) {
            this._buffers[channel] = []
        }
    }
    /**
     * @returns {boolean} Whether nothing has been buffered since the last flush.
     */
    isBufferEmpty() {
        return this._bytesWritten === 0
    }
    /**
     * @returns {boolean} Whether at least `_bufferSize` frames have accumulated.
     */
    isBufferFull() {
        return this._bytesWritten >= this._bufferSize
    }
    /**
     * Render-quantum callback: copies the incoming frames per channel and
     * flushes once the buffer is full.
     * @param {Float32Array[][]} inputs
     * @returns {boolean} Always true to keep the processor alive.
     */
    process(inputs) {
        if (this.isBufferFull()) {
            this.flush()
        }
        const input = inputs[0]
        if (input.length === 0) {
            // Sometimes, Firefox doesn't give any input. Skip this frame to not fail.
            return true
        }
        for (let channel = 0; channel < this._numChannels; channel++) {
            // Push a copy of the array.
            // The underlying implementation may reuse it which will break the recording.
            // `channel % input.length` repeats available channels when the input
            // delivers fewer channels than requested.
            this._buffers[channel].push([...input[channel % input.length]])
        }
        this._bytesWritten += input[0].length
        return true
    }
    // Merges, interleaves and resamples the buffered audio, posts it as
    // 16-bit PCM, then resets the buffers.
    flush() {
        let channels = []
        for (let channel = 0; channel < this._numChannels; channel++) {
            channels.push(this.mergeFloat32Arrays(this._buffers[channel], this._bytesWritten))
        }
        let interleaved = this.interleave(channels)
        let resampled = this._resampler.resample(interleaved)
        this.port.postMessage(this.floatTo16BitPCM(resampled))
        this.initBuffers()
    }
    // Concatenates one channel's chunk list into a single Float32Array of
    // `bytesWritten` samples.
    mergeFloat32Arrays(arrays, bytesWritten) {
        let result = new Float32Array(bytesWritten)
        var offset = 0
        for (let i = 0; i < arrays.length; i++) {
            result.set(arrays[i], offset)
            offset += arrays[i].length
        }
        return result
    }
    // Interleave data from channels from LLLLRRRR to LRLRLRLR
    interleave(channels) {
        // BUGFIX: the original compared the channel ARRAY itself to 1
        // (`channels === 1`), which is never true, so mono audio always took
        // the generic interleave loop. Compare the channel count instead.
        if (channels.length === 1) {
            return channels[0]
        }
        var length = 0
        for (let i = 0; i < channels.length; i++) {
            length += channels[i].length
        }
        let result = new Float32Array(length)
        var index = 0
        var inputIndex = 0
        while (index < length) {
            for (let i = 0; i < channels.length; i++) {
                result[index] = channels[i][inputIndex]
                index++
            }
            inputIndex++
        }
        return result
    }
    // Converts [-1, 1] float samples to little-endian signed 16-bit PCM,
    // clamping out-of-range values.
    floatTo16BitPCM(input) {
        let output = new DataView(new ArrayBuffer(input.length * 2))
        for (let i = 0; i < input.length; i++) {
            let s = Math.max(-1, Math.min(1, input[i]))
            let s16 = s < 0 ? s * 0x8000 : s * 0x7FFF
            output.setInt16(i * 2, s16, true)
        }
        return new Int16Array(output.buffer)
    }
}
/**
 * XAudioJS-style sample-rate converter. Strategy is chosen at construction:
 * pass-through when the rates match, linear interpolation when upsampling,
 * and a multi-tap averaging filter when downsampling (so no input samples
 * are skipped). Interleaved multi-channel input is supported via `channels`.
 */
class Resampler {
constructor(fromSampleRate, toSampleRate, channels, inputBufferSize) {
if (!fromSampleRate || !toSampleRate || !channels) {
throw (new Error("Invalid settings specified for the resampler."));
}
this.resampler = null;
this.fromSampleRate = fromSampleRate;
this.toSampleRate = toSampleRate;
this.channels = channels || 0;
this.inputBufferSize = inputBufferSize;
this.initialize()
}
// Picks the resampling strategy and allocates internal buffers.
initialize() {
if (this.fromSampleRate == this.toSampleRate) {
// Setup resampler bypass - Resampler just returns what was passed through
this.resampler = (buffer) => {
return buffer
};
this.ratioWeight = 1;
} else {
if (this.fromSampleRate < this.toSampleRate) {
// Use generic linear interpolation if upsampling,
// as linear interpolation produces a gradient that we want
// and works fine with two input sample points per output in this case.
this.linearInterpolation();
this.lastWeight = 1;
} else {
// Custom resampler I wrote that doesn't skip samples
// like standard linear interpolation in high downsampling.
// This is more accurate than linear interpolation on downsampling.
this.multiTap();
this.tailExists = false;
this.lastWeight = 0;
}
// Initialize the internal buffer:
this.initializeBuffers();
this.ratioWeight = this.fromSampleRate / this.toSampleRate;
}
}
// Returns the first `sliceAmount` output samples, tolerating environments
// where outputBuffer is a plain array or lacks subarray().
bufferSlice(sliceAmount) {
//Typed array and normal array buffer section referencing:
try {
return this.outputBuffer.subarray(0, sliceAmount);
}
catch (error) {
try {
//Regular array pass:
this.outputBuffer.length = sliceAmount;
return this.outputBuffer;
}
catch (error) {
//Nightly Firefox 4 used to have the subarray function named as slice:
return this.outputBuffer.slice(0, sliceAmount);
}
}
}
// Sizes the output buffer for the worst case (ratio + rounding slack) and
// allocates the per-channel carry-over storage.
initializeBuffers() {
this.outputBufferSize = (Math.ceil(this.inputBufferSize * this.toSampleRate / this.fromSampleRate / this.channels * 1.000000476837158203125) + this.channels) + this.channels;
try {
this.outputBuffer = new Float32Array(this.outputBufferSize);
this.lastOutput = new Float32Array(this.channels);
}
catch (error) {
this.outputBuffer = [];
this.lastOutput = [];
}
}
// Installs the upsampling strategy: each output sample is a weighted blend
// of two neighbouring input samples; `lastOutput`/`lastWeight` carry state
// across calls so chunk boundaries stay continuous.
linearInterpolation() {
this.resampler = (buffer) => {
let bufferLength = buffer.length,
channels = this.channels,
outLength,
ratioWeight,
weight,
firstWeight,
secondWeight,
sourceOffset,
outputOffset,
outputBuffer,
channel;
if ((bufferLength % channels) !== 0) {
throw (new Error("Buffer was of incorrect sample length."));
}
if (bufferLength <= 0) {
return [];
}
outLength = this.outputBufferSize;
ratioWeight = this.ratioWeight;
weight = this.lastWeight;
firstWeight = 0;
secondWeight = 0;
sourceOffset = 0;
outputOffset = 0;
outputBuffer = this.outputBuffer;
// Blend the previous chunk's final samples with this chunk's first ones.
for (; weight < 1; weight += ratioWeight) {
secondWeight = weight % 1;
firstWeight = 1 - secondWeight;
this.lastWeight = weight % 1;
for (channel = 0; channel < this.channels; ++channel) {
outputBuffer[outputOffset++] = (this.lastOutput[channel] * firstWeight) + (buffer[channel] * secondWeight);
}
}
weight -= 1;
// Main pass: interpolate between consecutive input frames.
for (bufferLength -= channels, sourceOffset = Math.floor(weight) * channels; outputOffset < outLength && sourceOffset < bufferLength;) {
secondWeight = weight % 1;
firstWeight = 1 - secondWeight;
for (channel = 0; channel < this.channels; ++channel) {
outputBuffer[outputOffset++] = (buffer[sourceOffset + ((channel > 0) ? (channel) : 0)] * firstWeight) + (buffer[sourceOffset + (channels + channel)] * secondWeight);
}
weight += ratioWeight;
sourceOffset = Math.floor(weight) * channels;
}
// Remember the trailing frame for the next chunk.
for (channel = 0; channel < channels; ++channel) {
this.lastOutput[channel] = buffer[sourceOffset++];
}
return this.bufferSlice(outputOffset);
};
}
// Installs the downsampling strategy: each output sample averages ALL input
// samples it spans (weighted by coverage), with a partial "tail" carried
// over to the next chunk when an output sample straddles the boundary.
multiTap() {
this.resampler = (buffer) => {
let bufferLength = buffer.length,
outLength,
output_variable_list,
channels = this.channels,
ratioWeight,
weight,
channel,
actualPosition,
amountToNext,
alreadyProcessedTail,
outputBuffer,
outputOffset,
currentPosition;
if ((bufferLength % channels) !== 0) {
throw (new Error("Buffer was of incorrect sample length."));
}
if (bufferLength <= 0) {
return [];
}
outLength = this.outputBufferSize;
output_variable_list = [];
ratioWeight = this.ratioWeight;
weight = 0;
actualPosition = 0;
amountToNext = 0;
alreadyProcessedTail = !this.tailExists;
this.tailExists = false;
outputBuffer = this.outputBuffer;
outputOffset = 0;
currentPosition = 0;
for (channel = 0; channel < channels; ++channel) {
output_variable_list[channel] = 0;
}
do {
if (alreadyProcessedTail) {
// Start a fresh output sample spanning `ratioWeight` input samples.
weight = ratioWeight;
for (channel = 0; channel < channels; ++channel) {
output_variable_list[channel] = 0;
}
} else {
// Resume the partially accumulated sample from the previous chunk.
weight = this.lastWeight;
for (channel = 0; channel < channels; ++channel) {
output_variable_list[channel] = this.lastOutput[channel];
}
alreadyProcessedTail = true;
}
while (weight > 0 && actualPosition < bufferLength) {
amountToNext = 1 + actualPosition - currentPosition;
if (weight >= amountToNext) {
for (channel = 0; channel < channels; ++channel) {
output_variable_list[channel] += buffer[actualPosition++] * amountToNext;
}
currentPosition = actualPosition;
weight -= amountToNext;
} else {
for (channel = 0; channel < channels; ++channel) {
output_variable_list[channel] += buffer[actualPosition + ((channel > 0) ? channel : 0)] * weight;
}
currentPosition += weight;
weight = 0;
break;
}
}
if (weight === 0) {
// Output sample fully covered: emit the normalized average.
for (channel = 0; channel < channels; ++channel) {
outputBuffer[outputOffset++] = output_variable_list[channel] / ratioWeight;
}
} else {
// Ran out of input mid-sample: stash the partial sum as the tail.
this.lastWeight = weight;
for (channel = 0; channel < channels; ++channel) {
this.lastOutput[channel] = output_variable_list[channel];
}
this.tailExists = true;
break;
}
} while (actualPosition < bufferLength && outputOffset < outLength);
return this.bufferSlice(outputOffset);
};
}
// Resets carry-over state and buffers, then runs the configured strategy.
// NOTE(review): this re-initializes on EVERY call, which discards the
// cross-chunk continuity state set up by the strategies — confirm whether
// that is intentional for this recorder's chunked usage.
resample(buffer) {
if (this.fromSampleRate == this.toSampleRate) {
this.ratioWeight = 1;
} else {
if (this.fromSampleRate < this.toSampleRate) {
this.lastWeight = 1;
} else {
this.tailExists = false;
this.lastWeight = 0;
}
this.initializeBuffers();
this.ratioWeight = this.fromSampleRate / this.toSampleRate;
}
return this.resampler(buffer)
}
}
// Registers the processor with the AudioWorkletGlobalScope under the name
// "recorder.worklet".
registerProcessor("recorder.worklet", RecorderProcessor)

Binary file not shown.

280
coverage/lcov.info Normal file
View File

@ -0,0 +1,280 @@
SF:lib/src/interfaces/speech_recognition_service.dart
DA:68,2
DA:77,2
DA:78,2
DA:79,4
DA:80,4
DA:81,4
DA:82,4
DA:83,2
LF:8
LH:8
end_of_record
SF:lib/src/models/speech_recognition_result.dart
DA:15,4
DA:23,3
DA:24,3
DA:25,3
DA:26,3
DA:27,6
DA:28,7
DA:33,3
DA:34,3
DA:35,3
DA:36,3
DA:37,3
DA:38,3
DA:42,0
DA:44,0
DA:45,0
DA:46,0
DA:49,0
DA:52,0
DA:53,0
DA:54,0
DA:55,0
DA:56,0
DA:57,0
DA:60,0
DA:62,0
DA:63,0
DA:64,0
DA:65,0
LF:29
LH:13
end_of_record
SF:lib/src/models/speech_recognition_error.dart
DA:39,5
DA:46,3
DA:47,3
DA:48,3
DA:49,9
DA:50,2
DA:53,3
DA:55,3
DA:56,3
DA:61,3
DA:62,3
DA:63,6
DA:64,3
DA:65,3
DA:69,0
DA:71,0
DA:72,0
DA:75,0
DA:78,0
DA:79,0
DA:80,0
DA:81,0
DA:84,0
DA:86,0
LF:24
LH:14
end_of_record
SF:lib/src/widgets/recording_button.dart
DA:47,2
DA:63,2
DA:64,2
DA:75,2
DA:77,2
DA:78,2
DA:79,2
DA:82,2
DA:83,4
DA:87,4
DA:90,4
DA:91,2
DA:96,2
DA:98,8
DA:102,4
DA:103,2
DA:105,4
DA:115,3
DA:116,2
DA:121,5
DA:122,5
DA:123,4
DA:124,2
DA:125,1
DA:128,2
DA:130,2
DA:132,3
DA:136,2
DA:137,1
DA:140,4
DA:142,1
DA:148,2
DA:149,4
DA:152,1
DA:153,0
DA:155,4
DA:158,0
DA:160,0
DA:166,2
DA:168,2
DA:171,6
DA:172,4
DA:173,1
DA:174,2
DA:176,3
DA:179,2
DA:180,2
DA:181,2
DA:182,2
DA:183,4
DA:184,2
DA:185,4
DA:186,4
DA:187,2
DA:190,2
DA:191,2
DA:192,2
DA:193,2
DA:194,2
DA:198,2
DA:200,2
DA:201,8
DA:202,2
DA:203,2
DA:204,2
DA:205,6
DA:215,4
DA:216,2
DA:217,4
DA:225,2
DA:227,4
DA:228,2
LF:72
LH:69
end_of_record
SF:lib/src/yx_asr_service.dart
DA:14,12
DA:17,8
DA:19,4
DA:46,2
DA:56,0
DA:57,0
DA:58,0
DA:62,4
DA:63,4
DA:64,0
DA:71,2
DA:76,2
DA:81,2
DA:82,0
DA:84,0
DA:91,0
DA:94,0
DA:95,0
DA:99,0
DA:100,0
DA:101,0
DA:102,0
DA:103,0
DA:105,0
DA:114,0
DA:115,0
DA:116,0
DA:120,4
DA:129,1
DA:134,1
DA:135,1
DA:138,0
DA:143,0
DA:146,0
DA:148,0
DA:149,0
DA:152,0
DA:154,2
DA:159,1
DA:160,1
DA:166,0
DA:169,0
DA:170,0
DA:173,0
DA:174,0
DA:175,0
DA:176,0
DA:177,0
DA:180,0
DA:185,0
DA:186,0
DA:188,0
DA:193,1
DA:194,1
DA:195,1
DA:196,1
DA:197,1
DA:198,2
DA:202,2
DA:205,3
DA:208,3
DA:211,3
DA:214,2
DA:216,2
DA:220,2
DA:221,2
DA:225,1
DA:227,2
DA:231,0
DA:241,0
DA:244,0
DA:245,0
DA:246,0
DA:248,0
DA:250,0
DA:253,0
DA:254,0
DA:258,0
DA:263,2
DA:265,2
DA:266,2
DA:268,4
DA:269,0
DA:277,0
DA:278,0
DA:279,0
DA:281,0
DA:282,0
DA:289,0
DA:290,0
DA:291,0
DA:292,0
DA:293,0
DA:299,0
DA:300,0
DA:302,0
DA:303,0
DA:304,0
DA:307,0
DA:313,0
DA:314,0
DA:315,0
DA:316,0
DA:317,0
DA:320,0
DA:325,0
DA:328,0
DA:329,0
DA:335,0
DA:341,0
DA:347,0
DA:351,2
DA:353,2
DA:358,4
DA:362,1
DA:363,1
DA:364,1
DA:365,1
DA:366,1
DA:367,1
DA:368,1
DA:369,1
DA:373,1
DA:374,1
DA:375,2
DA:376,2
DA:377,2
LF:127
LH:52
end_of_record

39
debug_app.sh Executable file
View File

@ -0,0 +1,39 @@
#!/bin/bash
# Debug helper for the YX ASR example app.
#
# Works around a CocoaPods environment problem: a user Ruby setup
# (rvm/rbenv via GEM_PATH/GEM_HOME) can shadow the Homebrew `pod`
# binary, so the Homebrew binary is invoked explicitly with the GEM
# variables unset.
#
# Steps: clean -> pub get -> pod install -> iOS debug build (unsigned).

echo "🔧 开始调试 YX ASR 应用..."

# 1. Clean stale build artifacts so leftover Pods/ephemeral files don't interfere.
echo "🧹 清理构建环境..."
flutter clean || exit 1

# 2. Fetch Dart/Flutter dependencies.
echo "📦 获取 Flutter 依赖..."
flutter pub get || exit 1

# 3. Install iOS dependencies with the Homebrew CocoaPods binary.
echo "🍎 安装 iOS 依赖..."
cd ios || exit 1  # abort rather than run `pod install` in the wrong directory
unset GEM_PATH
unset GEM_HOME
/opt/homebrew/bin/pod install || { cd ..; exit 1; }
cd ..

# 4. Build the iOS app.
echo "🔨 构建 iOS 应用..."
flutter build ios --debug --no-codesign

# 5. Report the build result ($? is the exit status of `flutter build` above).
if [ $? -eq 0 ]; then
    echo "✅ 构建成功!"
    echo "📱 现在可以在 Xcode 中打开项目并运行:"
    echo "   open ios/Runner.xcworkspace"
    echo ""
    echo "🔍 或者尝试在真机上运行:"
    echo "   flutter run -d \"Max'iPhone\""
else
    echo "❌ 构建失败,请检查错误信息"
fi

echo "🎯 调试完成!"

356
doc/api_reference.md Normal file
View File

@ -0,0 +1,356 @@
# YX ASR API Reference
This document provides detailed API reference for the YX ASR Flutter plugin.
## Table of Contents
- [YxAsr Class](#yxasr-class)
- [SpeechRecognitionResult](#speechrecognitionresult)
- [SpeechRecognitionError](#speechrecognitionerror)
- [RecordingButton Widget](#recordingbutton-widget)
- [Error Types](#error-types)
- [Usage Examples](#usage-examples)
## YxAsr Class
The main class for speech-to-text functionality.
### Constructor
```dart
YxAsr()
```
Returns the singleton instance of YxAsr.
### Methods
#### initialize()
```dart
Future<bool> initialize()
```
Initializes the speech recognition service. This method checks availability and requests permission if needed.
**Returns:** `true` if speech recognition is ready to use, `false` otherwise.
**Example:**
```dart
final speechToText = YxAsr();
bool initialized = await speechToText.initialize();
```
#### isAvailable()
```dart
Future<bool> isAvailable()
```
Checks if speech recognition is available on the device.
**Returns:** `true` if speech recognition is supported and available, `false` otherwise.
#### hasPermission()
```dart
Future<bool> hasPermission()
```
Checks if microphone permission is currently granted.
**Returns:** `true` if permission is granted, `false` otherwise.
#### requestPermission()
```dart
Future<bool> requestPermission()
```
Requests microphone permission from the user. On some platforms, this may show a permission dialog.
**Returns:** `true` if permission is granted, `false` otherwise.
#### startListening()
```dart
Future<void> startListening({
String localeId = 'en-US',
bool partialResults = true,
bool onDevice = false,
})
```
Starts listening for speech input.
**Parameters:**
- `localeId` (String): The locale for speech recognition (e.g., 'en-US', 'zh-CN'). Default: 'en-US'
- `partialResults` (bool): Whether to return partial/interim results during recognition. Default: true
- `onDevice` (bool): Whether to use on-device recognition (iOS only, ignored on Android). Default: false
**Throws:** `PlatformException` if speech recognition fails to start.
#### stopListening()
```dart
Future<void> stopListening()
```
Stops listening for speech input. This will finalize the current recognition session and return the final result through the `onResult` stream.
#### cancel()
```dart
Future<void> cancel()
```
Cancels the current speech recognition session. This immediately stops recognition without returning a final result.
#### isListening
```dart
Future<bool> get isListening
```
Returns `true` if currently listening for speech, `false` otherwise.
### Streams
#### onResult
```dart
Stream<SpeechRecognitionResult> get onResult
```
Stream of speech recognition results. This stream emits `SpeechRecognitionResult` objects containing recognized text, confidence level, and whether the result is final or interim.
#### onError
```dart
Stream<SpeechRecognitionError> get onError
```
Stream of speech recognition errors. This stream emits `SpeechRecognitionError` objects when errors occur during speech recognition.
#### onListeningStatusChanged
```dart
Stream<bool> get onListeningStatusChanged
```
Stream of listening status changes. This stream emits `true` when speech recognition starts listening and `false` when it stops listening.
## SpeechRecognitionResult
Represents the result of speech recognition.
### Properties
```dart
class SpeechRecognitionResult {
final String recognizedWords; // The recognized text
final bool finalResult; // Whether this is a final result or partial/interim result
final double confidence; // Confidence level of the recognition (0.0 to 1.0)
final List<String> alternatives; // Alternative recognition results
}
```
### Methods
#### fromMap()
```dart
factory SpeechRecognitionResult.fromMap(Map<String, dynamic> map)
```
Creates a `SpeechRecognitionResult` from a map.
#### toMap()
```dart
Map<String, dynamic> toMap()
```
Converts this result to a map.
## SpeechRecognitionError
Represents an error that occurred during speech recognition.
### Properties
```dart
class SpeechRecognitionError {
final SpeechRecognitionErrorType errorType; // The type of error
final String errorMsg; // Human-readable error message
final String? errorCode; // Platform-specific error code (optional)
}
```
### Methods
#### fromMap()
```dart
factory SpeechRecognitionError.fromMap(Map<String, dynamic> map)
```
Creates a `SpeechRecognitionError` from a map.
#### toMap()
```dart
Map<String, dynamic> toMap()
```
Converts this error to a map.
## RecordingButton Widget
A customizable recording button widget for speech-to-text functionality.
### Constructor
```dart
const RecordingButton({
Key? key,
this.onResult,
this.onError,
this.onListeningStatusChanged,
this.localeId = 'en-US',
this.partialResults = true,
this.onDevice = false,
this.size = 80.0,
this.idleColor,
this.recordingColor,
this.disabledColor,
this.idleIcon,
this.recordingIcon,
this.iconSize,
this.decoration,
this.enabled = true,
this.tooltip,
})
```
### Properties
- `onResult` (Function(SpeechRecognitionResult)?): Callback called when speech recognition results are received
- `onError` (Function(SpeechRecognitionError)?): Callback called when speech recognition errors occur
- `onListeningStatusChanged` (Function(bool)?): Callback called when listening status changes
- `localeId` (String): The locale for speech recognition. Default: 'en-US'
- `partialResults` (bool): Whether to return partial/interim results. Default: true
- `onDevice` (bool): Whether to use on-device recognition (iOS only). Default: false
- `size` (double): The size of the button. Default: 80.0
- `idleColor` (Color?): The color of the button when not recording
- `recordingColor` (Color?): The color of the button when recording
- `disabledColor` (Color?): The color of the button when disabled
- `idleIcon` (IconData?): The icon to show when not recording
- `recordingIcon` (IconData?): The icon to show when recording
- `iconSize` (double?): The size of the icon
- `decoration` (Decoration?): Custom decoration for the button
- `enabled` (bool): Whether the button is enabled. Default: true
- `tooltip` (String?): Tooltip text for the button
## Error Types
```dart
enum SpeechRecognitionErrorType {
network, // Network error occurred during recognition
audio, // Audio recording error
service, // Speech recognition service error
permissionDenied, // Permission denied (microphone access)
notAvailable, // Speech recognition not available on device
cancelled, // Recognition was cancelled by user
noSpeech, // No speech detected
unknown, // Unknown error
}
```
## Usage Examples
### Basic Speech Recognition
```dart
final speechToText = YxAsr();
// Initialize
bool initialized = await speechToText.initialize();
if (!initialized) return;
// Listen for results
speechToText.onResult.listen((result) {
print('Recognized: ${result.recognizedWords}');
print('Final: ${result.finalResult}');
print('Confidence: ${result.confidence}');
});
// Listen for errors
speechToText.onError.listen((error) {
print('Error: ${error.errorMsg}');
});
// Start listening
await speechToText.startListening(
localeId: 'en-US',
partialResults: true,
);
// Stop listening
await speechToText.stopListening();
```
### Using RecordingButton
```dart
RecordingButton(
onResult: (result) {
setState(() {
recognizedText = result.recognizedWords;
});
},
onError: (error) {
showDialog(
context: context,
builder: (context) => AlertDialog(
title: Text('Error'),
content: Text(error.errorMsg),
actions: [
TextButton(
onPressed: () => Navigator.pop(context),
child: Text('OK'),
),
],
),
);
},
localeId: 'zh-CN',
size: 100.0,
recordingColor: Colors.red,
idleColor: Colors.blue,
)
```
### Error Handling
```dart
speechToText.onError.listen((error) async {
switch (error.errorType) {
case SpeechRecognitionErrorType.permissionDenied:
// Request permission again
await speechToText.requestPermission();
break;
case SpeechRecognitionErrorType.network:
// Show network error message
showNetworkError();
break;
case SpeechRecognitionErrorType.noSpeech:
// Prompt user to speak
showSpeakPrompt();
break;
default:
// Handle other errors
showGenericError(error.errorMsg);
}
});
```

45
example/.gitignore vendored Normal file
View File

@ -0,0 +1,45 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.build/
.buildlog/
.history
.svn/
.swiftpm/
migrate_working_dir/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
**/doc/api/
**/ios/Flutter/.last_build_id
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
.pub-cache/
.pub/
/build/
# Symbolication related
app.*.symbols
# Obfuscation related
app.*.map.json
# Android Studio will place build artifacts here
/android/app/debug
/android/app/profile
/android/app/release

45
example/.metadata Normal file
View File

@ -0,0 +1,45 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: "be698c48a6750c8cb8e61c740ca9991bb947aba2"
channel: "stable"
project_type: app
# Tracks metadata for the flutter migrate command
migration:
platforms:
- platform: root
create_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
base_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
- platform: android
create_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
base_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
- platform: ios
create_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
base_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
- platform: linux
create_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
base_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
- platform: macos
create_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
base_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
- platform: web
create_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
base_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
- platform: windows
create_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
base_revision: be698c48a6750c8cb8e61c740ca9991bb947aba2
# User provided section
# List of Local paths (relative to this file) that should be
# ignored by the migrate tool.
#
# Files that are not part of the templates will be ignored by default.
unmanaged_files:
- 'lib/main.dart'
- 'ios/Runner.xcodeproj/project.pbxproj'

16
example/README.md Normal file
View File

@ -0,0 +1,16 @@
# yx_asr_example
A new Flutter project.
## Getting Started
This project is a starting point for a Flutter application.
A few resources to get you started if this is your first Flutter project:
- [Lab: Write your first Flutter app](https://docs.flutter.dev/get-started/codelab)
- [Cookbook: Useful Flutter samples](https://docs.flutter.dev/cookbook)
For help getting started with Flutter development, view the
[online documentation](https://docs.flutter.dev/), which offers tutorials,
samples, guidance on mobile development, and a full API reference.

View File

@ -0,0 +1,28 @@
# This file configures the analyzer, which statically analyzes Dart code to
# check for errors, warnings, and lints.
#
# The issues identified by the analyzer are surfaced in the UI of Dart-enabled
# IDEs (https://dart.dev/tools#ides-and-editors). The analyzer can also be
# invoked from the command line by running `flutter analyze`.
# The following line activates a set of recommended lints for Flutter apps,
# packages, and plugins designed to encourage good coding practices.
include: package:flutter_lints/flutter.yaml
linter:
# The lint rules applied to this project can be customized in the
# section below to disable rules from the `package:flutter_lints/flutter.yaml`
# included above or to enable additional rules. A list of all available lints
# and their documentation is published at https://dart.dev/lints.
#
# Instead of disabling a lint rule for the entire project in the
# section below, it can also be suppressed for a single line of code
# or a specific dart file by using the `// ignore: name_of_lint` and
# `// ignore_for_file: name_of_lint` syntax on the line or in the file
# producing the lint.
rules:
# avoid_print: false # Uncomment to disable the `avoid_print` rule
# prefer_single_quotes: true # Uncomment to enable the `prefer_single_quotes` rule
# Additional information about this file can be found at
# https://dart.dev/guides/language/analysis-options

14
example/android/.gitignore vendored Normal file
View File

@ -0,0 +1,14 @@
gradle-wrapper.jar
/.gradle
/captures/
/gradlew
/gradlew.bat
/local.properties
GeneratedPluginRegistrant.java
.cxx/
# Remember to never publicly share your keystore.
# See https://flutter.dev/to/reference-keystore
key.properties
**/*.keystore
**/*.jks

View File

@ -0,0 +1,2 @@
#Tue Aug 26 20:08:19 CST 2025
gradle.version=8.9

View File

@ -0,0 +1,65 @@
plugins {
    id "com.android.application"
    id "kotlin-android"
    // The Flutter Gradle plugin must be applied after the Android and Kotlin plugins.
    id "dev.flutter.flutter-gradle-plugin"
}

// Read developer-machine settings (Flutter SDK path, version overrides)
// from local.properties; this file is generated and not version-controlled.
def localProperties = new Properties()
def localPropertiesFile = rootProject.file('local.properties')
if (localPropertiesFile.exists()) {
    localPropertiesFile.withReader('UTF-8') { reader ->
        localProperties.load(reader)
    }
}

def flutterRoot = localProperties.getProperty('flutter.sdk')
if (flutterRoot == null) {
    throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.")
}

// Version code/name fall back to defaults when not supplied by the Flutter tool.
def flutterVersionCode = localProperties.getProperty('flutter.versionCode')
if (flutterVersionCode == null) {
    flutterVersionCode = '1'
}

def flutterVersionName = localProperties.getProperty('flutter.versionName')
if (flutterVersionName == null) {
    flutterVersionName = '1.0'
}

android {
    namespace "com.yuanxuan.yx_asr_example"
    compileSdk flutter.compileSdkVersion
    ndkVersion flutter.ndkVersion

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }

    kotlinOptions {
        jvmTarget = '1.8'
    }

    sourceSets {
        // MainActivity lives under src/main/kotlin rather than src/main/java.
        main.java.srcDirs += 'src/main/kotlin'
    }

    defaultConfig {
        applicationId "com.yuanxuan.yx_asr_example"
        // minSdk 23 is pinned explicitly (above Flutter's default) —
        // presumably required by the speech/recording dependencies; verify before lowering.
        minSdkVersion 23
        targetSdkVersion flutter.targetSdkVersion
        versionCode flutterVersionCode.toInteger()
        versionName flutterVersionName
    }

    buildTypes {
        release {
            // Signed with the debug keys for now so `flutter run --release` works.
            signingConfig signingConfigs.debug
        }
    }
}

flutter {
    source '../..'
}

View File

@ -0,0 +1,44 @@
plugins {
    id("com.android.application")
    id("kotlin-android")
    // The Flutter Gradle Plugin must be applied after the Android and Kotlin Gradle plugins.
    id("dev.flutter.flutter-gradle-plugin")
}

android {
    namespace = "com.yuanxuan.yx_asr_example"
    compileSdk = flutter.compileSdkVersion
    ndkVersion = flutter.ndkVersion

    compileOptions {
        sourceCompatibility = JavaVersion.VERSION_11
        targetCompatibility = JavaVersion.VERSION_11
    }

    kotlinOptions {
        jvmTarget = JavaVersion.VERSION_11.toString()
    }

    defaultConfig {
        // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html).
        applicationId = "com.yuanxuan.yx_asr_example"
        // You can update the following values to match your application needs.
        // For more information, see: https://flutter.dev/to/review-gradle-config.
        // All SDK/version values are delegated to the Flutter tool's defaults here.
        minSdk = flutter.minSdkVersion
        targetSdk = flutter.targetSdkVersion
        versionCode = flutter.versionCode
        versionName = flutter.versionName
    }

    buildTypes {
        release {
            // TODO: Add your own signing config for the release build.
            // Signing with the debug keys for now, so `flutter run --release` works.
            signingConfig = signingConfigs.getByName("debug")
        }
    }
}

flutter {
    source = "../.."
}

View File

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- The INTERNET permission is required for development. Specifically,
the Flutter tool needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

View File

@ -0,0 +1,43 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
    <!-- RECORD_AUDIO: microphone capture for speech recognition.
         INTERNET: network-based recognition backends.
         VIBRATE: haptic feedback on the recording button. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.VIBRATE" />

    <!-- Package-visibility declaration (Android 11+): lets the app discover
         installed speech recognition services. -->
    <queries>
        <intent>
            <action android:name="android.speech.RecognitionService" />
        </intent>
    </queries>

    <application
        android:label="yx_asr_example"
        android:name="${applicationName}"
        android:icon="@mipmap/ic_launcher">
        <activity
            android:name=".MainActivity"
            android:exported="true"
            android:launchMode="singleTop"
            android:theme="@style/LaunchTheme"
            android:configChanges="orientation|keyboardHidden|keyboard|screenSize|smallestScreenSize|locale|layoutDirection|fontScale|screenLayout|density|uiMode"
            android:hardwareAccelerated="true"
            android:windowSoftInputMode="adjustResize">
            <!-- Specifies an Android theme to apply to this Activity as soon as
                 the Android process has started. This theme is visible to the user
                 while the Flutter UI initializes. After that, this theme continues
                 to determine the Window background behind the Flutter UI. -->
            <meta-data
              android:name="io.flutter.embedding.android.NormalTheme"
              android:resource="@style/NormalTheme"
              />
            <intent-filter android:autoVerify="true">
                <action android:name="android.intent.action.MAIN"/>
                <category android:name="android.intent.category.LAUNCHER"/>
            </intent-filter>
        </activity>
        <!-- Don't delete the meta-data below.
             This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->
        <meta-data
            android:name="flutterEmbedding"
            android:value="2" />
    </application>
</manifest>

View File

@ -0,0 +1,6 @@
package com.yuanxuan.yx_asr_example

import io.flutter.embedding.android.FlutterActivity

/**
 * Entry-point activity for the example app.
 *
 * Intentionally empty: all plugin wiring happens through the generated
 * plugin registrant, so no platform-channel code is needed here.
 */
class MainActivity: FlutterActivity() {
}

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="?android:colorBackground" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

View File

@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Modify this file to customize your launch splash screen -->
<layer-list xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@android:color/white" />
<!-- You can insert your own image assets here -->
<!-- <item>
<bitmap
android:gravity="center"
android:src="@mipmap/launch_image" />
</item> -->
</layer-list>

Binary file not shown.

After

Width:  |  Height:  |  Size: 544 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 442 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 721 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is on -->
<style name="LaunchTheme" parent="@android:style/Theme.Black.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
the Flutter engine draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Black.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

View File

@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Theme applied to the Android Window while the process is starting when the OS's Dark Mode setting is off -->
<style name="LaunchTheme" parent="@android:style/Theme.Light.NoTitleBar">
<!-- Show a splash screen on the activity. Automatically removed when
the Flutter engine draws its first frame -->
<item name="android:windowBackground">@drawable/launch_background</item>
</style>
<!-- Theme applied to the Android Window as soon as the process has started.
This theme determines the color of the Android Window while your
Flutter UI initializes, as well as behind your Flutter UI while its
running.
This Theme is only used starting with V2 of Flutter's Android embedding. -->
<style name="NormalTheme" parent="@android:style/Theme.Light.NoTitleBar">
<item name="android:windowBackground">?android:colorBackground</item>
</style>
</resources>

View File

@ -0,0 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<!-- The INTERNET permission is required for development. Specifically,
the Flutter tool needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
</manifest>

View File

@ -0,0 +1,31 @@
buildscript {
    // Kotlin version shared with the kotlin-gradle-plugin classpath below.
    ext.kotlin_version = '1.8.10'
    repositories {
        google()
        mavenCentral()
    }

    dependencies {
        classpath 'com.android.tools.build:gradle:8.3.0'
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

// Redirect all build output out of android/ into the Flutter project's build/ directory.
rootProject.buildDir = '../build'
subprojects {
    project.buildDir = "${rootProject.buildDir}/${project.name}"
}
subprojects {
    // Evaluate :app first so plugin subprojects can read its configuration.
    project.evaluationDependsOn(':app')
}

tasks.register("clean", Delete) {
    delete rootProject.buildDir
}

View File

@ -0,0 +1,21 @@
allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

// Redirect all build output out of android/ into the Flutter project's build/ directory.
val newBuildDir: Directory = rootProject.layout.buildDirectory.dir("../../build").get()
rootProject.layout.buildDirectory.value(newBuildDir)

subprojects {
    val newSubprojectBuildDir: Directory = newBuildDir.dir(project.name)
    project.layout.buildDirectory.value(newSubprojectBuildDir)
}
subprojects {
    // Evaluate :app first so plugin subprojects can read its configuration.
    project.evaluationDependsOn(":app")
}

tasks.register<Delete>("clean") {
    delete(rootProject.layout.buildDirectory)
}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,3 @@
org.gradle.jvmargs=-Xmx1536M
android.useAndroidX=true
android.enableJetifier=true

View File

@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-all.zip

View File

@ -0,0 +1,25 @@
pluginManagement {
    // Resolve the Flutter SDK location from local.properties; fail fast if missing.
    def flutterSdkPath = {
        def properties = new Properties()
        file("local.properties").withInputStream { properties.load(it) }
        def flutterSdkPath = properties.getProperty("flutter.sdk")
        assert flutterSdkPath != null, "flutter.sdk not set in local.properties"
        return flutterSdkPath
    }()

    // Make the Flutter tool's Gradle plugins available to this build.
    includeBuild("$flutterSdkPath/packages/flutter_tools/gradle")

    repositories {
        google()
        mavenCentral()
        gradlePluginPortal()
    }
}

plugins {
    id "dev.flutter.flutter-plugin-loader" version "1.0.0"
    id "com.android.application" version "8.3.0" apply false
    id "org.jetbrains.kotlin.android" version "1.8.10" apply false
}

include ":app"

View File

@ -0,0 +1,25 @@
pluginManagement {
    // Resolve the Flutter SDK location from local.properties; fail fast if missing.
    val flutterSdkPath = run {
        val properties = java.util.Properties()
        file("local.properties").inputStream().use { properties.load(it) }
        val flutterSdkPath = properties.getProperty("flutter.sdk")
        require(flutterSdkPath != null) { "flutter.sdk not set in local.properties" }
        flutterSdkPath
    }

    // Make the Flutter tool's Gradle plugins available to this build.
    includeBuild("$flutterSdkPath/packages/flutter_tools/gradle")

    repositories {
        google()
        mavenCentral()
        gradlePluginPortal()
    }
}

plugins {
    id("dev.flutter.flutter-plugin-loader") version "1.0.0"
    id("com.android.application") version "8.7.3" apply false
    id("org.jetbrains.kotlin.android") version "2.1.0" apply false
}

include(":app")

View File

@ -0,0 +1,9 @@
---
license: apache-2.0
---
# Introduction
Models in this repo are converted from
https://huggingface.co/csukuangfj/sherpa-onnx-streaming-zipformer-zh-14M-2023-02-23
using [./export-onnx-zh-14M.sh](./export-onnx-zh-14M.sh).

View File

@ -0,0 +1,22 @@
#!/usr/bin/env bash
#
# Exports the streaming zipformer-zh-14M model to ONNX via icefall's export script.
#
# Please download required files from
# https://huggingface.co/marcoyang/sherpa-ncnn-streaming-zipformer-zh-14M-2023-02-23
#
# Note: epoch-99.pt is a symlink to sherpa-ncnn-streaming-zipformer-zh-14M-2023-02-23/pretrained.pt

# The layer/dimension flags below mirror the 14M Chinese model's architecture —
# they must match the checkpoint exactly or the export will fail.
python ./pruned_transducer_stateless7_streaming/export-onnx-zh.py \
  --tokens ./pruned_transducer_stateless7_streaming/14M-zh-2023-02-23/tokens.txt \
  --exp-dir ./pruned_transducer_stateless7_streaming/14M-zh-2023-02-23 \
  --use-averaged-model False \
  --epoch 99 \
  --avg 1 \
  --decode-chunk-len 32 \
  --num-encoder-layers "2,3,2,2,3" \
  --feedforward-dims "320,320,640,640,320" \
  --nhead "4,4,4,4,4" \
  --encoder-dims "160,160,160,160,160" \
  --attention-dims "96,96,96,96,96" \
  --encoder-unmasked-dims "128,128,128,128,128" \
  --decoder-dim 320 \
  --joiner-dim 320

View File

@ -0,0 +1,8 @@
# Introduction
This model is converted from
https://huggingface.co/yuekai/icefall-asr-multi-zh-hans-zipformer-large
The training code can be found at
https://github.com/k2-fsa/icefall/blob/master/egs/multi_zh-hans/ASR/RESULTS.md#multi-chinese-datasets-char-based-training-results-streaming-on-zipformer-large-model

Some files were not shown because too many files have changed in this diff Show More