重大架构优化:移除不必要的平台特定代码

🎯 **架构简化**
- 删除整个 ios/ 文件夹和所有 Swift/Objective-C 代码
- 删除整个 android/ 文件夹和所有 Kotlin/Java 代码
- 移除 pubspec.yaml 中的插件平台配置

🚀 **技术优势**
- 从 Flutter 插件转换为纯 Dart 包
- 完全依赖成熟的跨平台包:sherpa_onnx, record, permission_handler
- 大幅简化代码维护和调试复杂度
- 避免平台特定的构建错误和兼容性问题

✅ **解决的问题**
- 修复 iOS 构建中的重复类定义错误
- 消除 Objective-C/Swift 桥接代码的复杂性
- 提高代码可维护性和跨平台一致性

📦 **新架构**
基于 sherpa_onnx 的纯 Dart 实现,所有平台功能通过现有包处理
This commit is contained in:
Max 2025-09-10 20:47:54 +08:00
parent 06a2be00a0
commit e6ce7f4183
40 changed files with 20 additions and 926 deletions

View File

@ -5,7 +5,7 @@
"packages": [
{
"name": "yx_asr",
"version": "1.0.0",
"version": "1.0.2",
"dependencies": [
"flutter",
"path",

File diff suppressed because one or more lines are too long

View File

@ -1,2 +0,0 @@
#Tue Aug 26 19:25:13 CST 2025
gradle.version=8.9

View File

@ -1,39 +0,0 @@
package io.flutter.plugins;
import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import io.flutter.Log;
import io.flutter.embedding.engine.FlutterEngine;
/**
* Generated file. Do not edit.
* This file is generated by the Flutter tool based on the
* plugins that support the Android platform.
*/
@Keep
public final class GeneratedPluginRegistrant {
private static final String TAG = "GeneratedPluginRegistrant";
public static void registerWith(@NonNull FlutterEngine flutterEngine) {
try {
flutterEngine.getPlugins().add(new dev.flutter.plugins.integration_test.IntegrationTestPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin integration_test, dev.flutter.plugins.integration_test.IntegrationTestPlugin", e);
}
try {
flutterEngine.getPlugins().add(new io.flutter.plugins.pathprovider.PathProviderPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin path_provider_android, io.flutter.plugins.pathprovider.PathProviderPlugin", e);
}
try {
flutterEngine.getPlugins().add(new com.baseflow.permissionhandler.PermissionHandlerPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin permission_handler_android, com.baseflow.permissionhandler.PermissionHandlerPlugin", e);
}
try {
flutterEngine.getPlugins().add(new com.llfbandit.record.RecordPlugin());
} catch (Exception e) {
Log.e(TAG, "Error registering plugin record_android, com.llfbandit.record.RecordPlugin", e);
}
}
}

View File

@ -1,64 +0,0 @@
// Android build script for the yx_asr Flutter plugin module.
group 'com.yuanxuan.yx_asr'
version '1.0.0'

buildscript {
    // Single Kotlin version shared by the Gradle plugin and the stdlib below.
    ext.kotlin_version = '1.8.22'
    repositories {
        google()
        mavenCentral()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:8.1.0'
        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
    }
}

allprojects {
    repositories {
        google()
        mavenCentral()
    }
}

apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'

android {
    namespace 'com.yuanxuan.yx_asr'
    // `compileSdk` is the current DSL name; `compileSdkVersion` is deprecated
    // since AGP 7.0.
    compileSdk 33
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
    kotlinOptions {
        jvmTarget = '1.8'
    }
    sourceSets {
        main.java.srcDirs += 'src/main/kotlin'
        test.java.srcDirs += 'src/test/kotlin'
    }
    defaultConfig {
        minSdkVersion 21
    }
    testOptions {
        unitTests.all {
            useJUnitPlatform()
            testLogging {
                events "passed", "skipped", "failed", "standardOut", "standardError"
                outputs.upToDateWhen { false }
                showStandardStreams = true
            }
        }
    }
}

// Project-level dependencies. The original script nested this block inside
// `android {}`, where it only worked through Groovy closure delegation back to
// the project; the documented placement is at the top level.
dependencies {
    // Since Kotlin 1.8.0 the -jdk7/-jdk8 artifacts are merged into
    // kotlin-stdlib, so depend on the plain stdlib artifact.
    implementation "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
}

View File

@ -1,2 +0,0 @@
sdk.dir=/Users/max/development/android/sdk
flutter.sdk=/Users/max/fvm/versions/3.32.0

View File

@ -1,12 +0,0 @@
<!-- The plugin's namespace is declared via `namespace` in build.gradle
     (com.yuanxuan.yx_asr); the manifest `package` attribute is removed
     because AGP 8.x rejects it when a Gradle namespace is set. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
    <!-- Needed to capture microphone audio for speech recognition. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <!-- Needed by network-backed recognition services. -->
    <uses-permission android:name="android.permission.INTERNET" />
    <!-- Package-visibility declaration (Android 11+) so SpeechRecognizer can
         discover the device's RecognitionService implementation. -->
    <queries>
        <intent>
            <action android:name="android.speech.RecognitionService" />
        </intent>
    </queries>
</manifest>

View File

@ -1,330 +0,0 @@
package com.yuanxuan.yx_asr

import android.Manifest
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.os.Bundle
import android.speech.RecognitionListener
import android.speech.RecognizerIntent
import android.speech.SpeechRecognizer
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.embedding.engine.plugins.activity.ActivityAware
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
import io.flutter.plugin.common.EventChannel
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import io.flutter.plugin.common.PluginRegistry

/**
 * Android implementation of the yx_asr plugin.
 *
 * Bridges Android's [SpeechRecognizer] to Flutter through one [MethodChannel]
 * ("yx_asr") and three [EventChannel]s: recognition results ("yx_asr/results"),
 * errors ("yx_asr/errors") and a listening on/off status ("yx_asr/status").
 * Runtime microphone permission is requested through the attached Activity.
 */
class YxAsrPlugin: FlutterPlugin, MethodCallHandler, ActivityAware, PluginRegistry.RequestPermissionsResultListener {
    private lateinit var channel: MethodChannel
    private lateinit var resultEventChannel: EventChannel
    private lateinit var errorEventChannel: EventChannel
    private lateinit var statusEventChannel: EventChannel
    private var context: Context? = null
    private var activity: android.app.Activity? = null
    // Kept so the permission listener added in onAttachedToActivity can be
    // removed again on detach; the original code never removed it, leaking the
    // listener (and duplicating callbacks across attach/detach cycles).
    private var activityBinding: ActivityPluginBinding? = null
    private var speechRecognizer: SpeechRecognizer? = null
    private var isListening = false
    private var resultEventSink: EventChannel.EventSink? = null
    private var errorEventSink: EventChannel.EventSink? = null
    private var statusEventSink: EventChannel.EventSink? = null
    // Pending Dart result for an in-flight runtime-permission request.
    private var permissionResult: Result? = null
    companion object {
        private const val PERMISSION_REQUEST_CODE = 1001
    }
    override fun onAttachedToEngine(flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
        context = flutterPluginBinding.applicationContext
        channel = MethodChannel(flutterPluginBinding.binaryMessenger, "yx_asr")
        channel.setMethodCallHandler(this)
        resultEventChannel = EventChannel(flutterPluginBinding.binaryMessenger, "yx_asr/results")
        resultEventChannel.setStreamHandler(object : EventChannel.StreamHandler {
            override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
                resultEventSink = events
            }
            override fun onCancel(arguments: Any?) {
                resultEventSink = null
            }
        })
        errorEventChannel = EventChannel(flutterPluginBinding.binaryMessenger, "yx_asr/errors")
        errorEventChannel.setStreamHandler(object : EventChannel.StreamHandler {
            override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
                errorEventSink = events
            }
            override fun onCancel(arguments: Any?) {
                errorEventSink = null
            }
        })
        statusEventChannel = EventChannel(flutterPluginBinding.binaryMessenger, "yx_asr/status")
        statusEventChannel.setStreamHandler(object : EventChannel.StreamHandler {
            override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
                statusEventSink = events
            }
            override fun onCancel(arguments: Any?) {
                statusEventSink = null
            }
        })
    }
    override fun onMethodCall(call: MethodCall, result: Result) {
        when (call.method) {
            "isAvailable" -> {
                // No context means we cannot query the recognizer; report false.
                context?.let {
                    result.success(SpeechRecognizer.isRecognitionAvailable(it))
                } ?: result.success(false)
            }
            "hasPermission" -> {
                result.success(hasPermission())
            }
            "requestPermission" -> {
                requestPermission(result)
            }
            "startListening" -> {
                val localeId = call.argument<String>("localeId") ?: "en-US"
                val partialResults = call.argument<Boolean>("partialResults") ?: true
                startListening(localeId, partialResults, result)
            }
            "stopListening" -> {
                stopListening(result)
            }
            "cancel" -> {
                cancel(result)
            }
            "isListening" -> {
                result.success(isListening)
            }
            else -> {
                result.notImplemented()
            }
        }
    }
    /** True when RECORD_AUDIO is already granted (false when no context). */
    private fun hasPermission(): Boolean {
        return context?.let {
            ContextCompat.checkSelfPermission(it, Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED
        } ?: false
    }
    /**
     * Requests RECORD_AUDIO from the user. Answers [result] immediately when
     * already granted or no Activity is attached; otherwise the answer is
     * delivered later through [onRequestPermissionsResult].
     */
    private fun requestPermission(result: Result) {
        if (hasPermission()) {
            result.success(true)
            return
        }
        activity?.let {
            permissionResult = result
            ActivityCompat.requestPermissions(
                it,
                arrayOf(Manifest.permission.RECORD_AUDIO),
                PERMISSION_REQUEST_CODE
            )
        } ?: result.success(false)
    }
    override fun onRequestPermissionsResult(
        requestCode: Int,
        permissions: Array<out String>,
        grantResults: IntArray
    ): Boolean {
        if (requestCode == PERMISSION_REQUEST_CODE) {
            val granted = grantResults.isNotEmpty() && grantResults[0] == PackageManager.PERMISSION_GRANTED
            permissionResult?.success(granted)
            permissionResult = null
            return true
        }
        return false
    }
    /**
     * Creates a [SpeechRecognizer] and starts a recognition session.
     * Emits results/errors through the event channels; [result] only reports
     * whether the session could be started. A second call while already
     * listening is a no-op.
     */
    private fun startListening(localeId: String, partialResults: Boolean, result: Result) {
        if (!hasPermission()) {
            sendError("permissionDenied", "Microphone permission not granted", null)
            result.error("PERMISSION_DENIED", "Microphone permission not granted", null)
            return
        }
        if (isListening) {
            result.success(null)
            return
        }
        context?.let { ctx ->
            try {
                speechRecognizer = SpeechRecognizer.createSpeechRecognizer(ctx)
                speechRecognizer?.setRecognitionListener(createRecognitionListener())
                val intent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply {
                    putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
                    putExtra(RecognizerIntent.EXTRA_LANGUAGE, localeId)
                    putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, partialResults)
                    putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5)
                }
                speechRecognizer?.startListening(intent)
                isListening = true
                statusEventSink?.success(true)
                result.success(null)
            } catch (e: Exception) {
                sendError("service", "Failed to start speech recognition: ${e.message}", null)
                result.error("START_FAILED", "Failed to start speech recognition", e.message)
            }
        } ?: result.error("NO_CONTEXT", "Context not available", null)
    }
    /**
     * Asks the recognizer to stop capturing audio; final results (and the
     * status flip) arrive via the listener callbacks, which call [cleanup].
     */
    private fun stopListening(result: Result) {
        speechRecognizer?.stopListening()
        result.success(null)
    }
    /** Aborts the session immediately and discards pending results. */
    private fun cancel(result: Result) {
        speechRecognizer?.cancel()
        cleanup()
        result.success(null)
    }
    /** Destroys the recognizer and reports the not-listening status. */
    private fun cleanup() {
        speechRecognizer?.destroy()
        speechRecognizer = null
        isListening = false
        statusEventSink?.success(false)
    }
    private fun createRecognitionListener(): RecognitionListener {
        return object : RecognitionListener {
            override fun onReadyForSpeech(params: Bundle?) {
                // Speech recognition is ready
            }
            override fun onBeginningOfSpeech() {
                // User started speaking
            }
            override fun onRmsChanged(rmsdB: Float) {
                // Audio level changed
            }
            override fun onBufferReceived(buffer: ByteArray?) {
                // Audio buffer received
            }
            override fun onEndOfSpeech() {
                // User stopped speaking
            }
            override fun onError(error: Int) {
                // Map the SpeechRecognizer error code onto the plugin's
                // coarse error categories and a human-readable message.
                val errorType = when (error) {
                    SpeechRecognizer.ERROR_NETWORK_TIMEOUT, SpeechRecognizer.ERROR_NETWORK -> "network"
                    SpeechRecognizer.ERROR_AUDIO -> "audio"
                    SpeechRecognizer.ERROR_SERVER -> "service"
                    SpeechRecognizer.ERROR_CLIENT -> "service"
                    SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "noSpeech"
                    SpeechRecognizer.ERROR_NO_MATCH -> "noSpeech"
                    SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "service"
                    SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "permissionDenied"
                    else -> "unknown"
                }
                val errorMsg = when (error) {
                    SpeechRecognizer.ERROR_NETWORK_TIMEOUT -> "Network timeout"
                    SpeechRecognizer.ERROR_NETWORK -> "Network error"
                    SpeechRecognizer.ERROR_AUDIO -> "Audio recording error"
                    SpeechRecognizer.ERROR_SERVER -> "Server error"
                    SpeechRecognizer.ERROR_CLIENT -> "Client error"
                    SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "No speech input"
                    SpeechRecognizer.ERROR_NO_MATCH -> "No recognition result matched"
                    SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "Recognition service busy"
                    SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "Insufficient permissions"
                    else -> "Unknown error"
                }
                sendError(errorType, errorMsg, error.toString())
                cleanup()
            }
            override fun onResults(results: Bundle?) {
                results?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION)?.let { matches ->
                    if (matches.isNotEmpty()) {
                        val confidence = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES)
                        sendResult(
                            recognizedWords = matches[0],
                            finalResult = true,
                            // firstOrNull(): CONFIDENCE_SCORES may be missing
                            // or empty; the previous `get(0)` threw
                            // ArrayIndexOutOfBoundsException on an empty array.
                            confidence = confidence?.firstOrNull()?.toDouble() ?: 0.0,
                            alternatives = matches.drop(1)
                        )
                    }
                }
                cleanup()
            }
            override fun onPartialResults(partialResults: Bundle?) {
                partialResults?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION)?.let { matches ->
                    if (matches.isNotEmpty()) {
                        sendResult(
                            recognizedWords = matches[0],
                            finalResult = false,
                            confidence = 0.0,
                            alternatives = matches.drop(1)
                        )
                    }
                }
            }
            override fun onEvent(eventType: Int, params: Bundle?) {
                // Additional events
            }
        }
    }
    /** Pushes one recognition result map onto the results event channel. */
    private fun sendResult(recognizedWords: String, finalResult: Boolean, confidence: Double, alternatives: List<String>) {
        val result = mapOf(
            "recognizedWords" to recognizedWords,
            "finalResult" to finalResult,
            "confidence" to confidence,
            "alternatives" to alternatives
        )
        resultEventSink?.success(result)
    }
    /** Pushes one error map onto the errors event channel. */
    private fun sendError(errorType: String, errorMsg: String, errorCode: String?) {
        val error = mapOf(
            "errorType" to errorType,
            "errorMsg" to errorMsg,
            "errorCode" to errorCode
        )
        errorEventSink?.success(error)
    }
    override fun onDetachedFromEngine(binding: FlutterPlugin.FlutterPluginBinding) {
        channel.setMethodCallHandler(null)
        cleanup()
    }
    override fun onAttachedToActivity(binding: ActivityPluginBinding) {
        activity = binding.activity
        activityBinding = binding
        binding.addRequestPermissionsResultListener(this)
    }
    override fun onDetachedFromActivityForConfigChanges() {
        detachFromActivity()
    }
    override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) {
        activity = binding.activity
        activityBinding = binding
        binding.addRequestPermissionsResultListener(this)
    }
    override fun onDetachedFromActivity() {
        detachFromActivity()
    }
    /** Drops the Activity reference and unregisters the permission listener. */
    private fun detachFromActivity() {
        activityBinding?.removeRequestPermissionsResultListener(this)
        activityBinding = null
        activity = null
    }
}

View File

@ -1 +0,0 @@
{"buildConfigurations":[{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e987f5b3155b7b26c88d68696c21f46cc3e","buildSettings":{"CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/record_ios","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"record_ios","INFOPLIST_FILE":"Target Support Files/record_ios/ResourceBundle-record_ios_privacy-record_ios-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","ONLY_ACTIVE_ARCH":"NO","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework Speech","PRODUCT_NAME":"record_ios_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e986f06ec316eb44a4c4da4527cd08c3a8f","name":"Debug"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e98bfb1d663e62fa3d6f40293efaffb810f","buildSettings":{"CLANG_ENABLE_OBJC_WEAK":"NO","CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/record_ios","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"record_ios","INFOPLIST_FILE":"Target Support Files/record_ios/ResourceBundle-record_ios_privacy-record_ios-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework 
Speech","PRODUCT_NAME":"record_ios_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","VALIDATE_PRODUCT":"YES","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e98d3a7bc21be11eea36650c14a981a4a21","name":"Profile"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e98bfb1d663e62fa3d6f40293efaffb810f","buildSettings":{"CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/record_ios","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"record_ios","INFOPLIST_FILE":"Target Support Files/record_ios/ResourceBundle-record_ios_privacy-record_ios-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework Speech","PRODUCT_NAME":"record_ios_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e9893f6c2ca2ac7ceb884c507476cc3a3a1","name":"Release"}],"buildPhases":[{"buildFiles":[],"guid":"bfdfe7dc352907fc980b868725387e987eba38ad46157cacab6baf46e2c97b28","type":"com.apple.buildphase.sources"},{"buildFiles":[],"guid":"bfdfe7dc352907fc980b868725387e98585c515449a114c265106ae35ef78056","type":"com.apple.buildphase.frameworks"},{"buildFiles":[{"fileReference":"bfdfe7dc352907fc980b868725387e98b18105efd5bafedefc5647c8499985e6","guid":"bfdfe7dc352907fc980b868725387e98a02cab793f6ffe791c8a532f4724d833"}],"guid":"bfdfe7dc352907fc980b868725387e988ccaa3aab9ec360ff6e16378731b23fa","type":"com.apple.buildphase.resources"}],"buildRules":[],"dependencies":[],"guid":"bfdfe7dc352907fc980b868725387e982a2ee81fc4f9376a4b6bb6d6bb502a00","name":"record_ios-record_ios_privacy","productReference":{"guid":"bfdfe7dc352907fc980b868725387e98c34601
a2dc07dcfea6b09ca49bc4da60","name":"record_ios_privacy.bundle","type":"product"},"productTypeIdentifier":"com.apple.product-type.bundle","provisioningSourceData":[{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Debug","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Profile","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Release","provisioningStyle":0}],"type":"standard"}

View File

@ -1 +0,0 @@
{"buildConfigurations":[{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e981397e4ca66b8bc1b6b787af196ee2a0c","buildSettings":{"CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/permission_handler_apple","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"permission_handler_apple","INFOPLIST_FILE":"Target Support Files/permission_handler_apple/ResourceBundle-permission_handler_apple_privacy-permission_handler_apple-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","ONLY_ACTIVE_ARCH":"NO","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework Speech","PRODUCT_NAME":"permission_handler_apple_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e98d769a3a3853c9249977754ea24f3b8bb","name":"Debug"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e98201acde82baf32765e719536c1fcd415","buildSettings":{"CLANG_ENABLE_OBJC_WEAK":"NO","CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/permission_handler_apple","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"permission_handler_apple","INFOPLIST_FILE":"Target Support Files/permission_handler_apple/ResourceBundle-permission_handler_apple_privacy-permission_handler_apple-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework 
Speech","PRODUCT_NAME":"permission_handler_apple_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","VALIDATE_PRODUCT":"YES","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e989a076473f270b5d52145183a44aba102","name":"Profile"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e98201acde82baf32765e719536c1fcd415","buildSettings":{"CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/permission_handler_apple","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"permission_handler_apple","INFOPLIST_FILE":"Target Support Files/permission_handler_apple/ResourceBundle-permission_handler_apple_privacy-permission_handler_apple-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework 
Speech","PRODUCT_NAME":"permission_handler_apple_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e98682380182db58e4ded87336dc6e1d894","name":"Release"}],"buildPhases":[{"buildFiles":[],"guid":"bfdfe7dc352907fc980b868725387e98c26bdf2e10ce31dd6b04911f1aadd468","type":"com.apple.buildphase.sources"},{"buildFiles":[],"guid":"bfdfe7dc352907fc980b868725387e98cf73538f434be7a8ded6885071e43bbb","type":"com.apple.buildphase.frameworks"},{"buildFiles":[{"fileReference":"bfdfe7dc352907fc980b868725387e981993b5887705d08bd566da0ef34911dc","guid":"bfdfe7dc352907fc980b868725387e9830ed596f4e641fbb73e74f191ee1d3f3"}],"guid":"bfdfe7dc352907fc980b868725387e98966a90ee2025d7afde1b9bd8d4be3432","type":"com.apple.buildphase.resources"}],"buildRules":[],"dependencies":[],"guid":"bfdfe7dc352907fc980b868725387e9802f35ab680609a626ebd2ddd692a3822","name":"permission_handler_apple-permission_handler_apple_privacy","productReference":{"guid":"bfdfe7dc352907fc980b868725387e983e9a904e8a35cb34b69458780be142b3","name":"permission_handler_apple_privacy.bundle","type":"product"},"productTypeIdentifier":"com.apple.product-type.bundle","provisioningSourceData":[{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Debug","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Profile","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Release","provisioningStyle":0}],"type":"standard"}

View File

@ -1 +0,0 @@
{"buildConfigurations":[{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e9881514ee24132a163c8de1269d399c3bc","buildSettings":{"CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/path_provider_foundation","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"path_provider_foundation","INFOPLIST_FILE":"Target Support Files/path_provider_foundation/ResourceBundle-path_provider_foundation_privacy-path_provider_foundation-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","ONLY_ACTIVE_ARCH":"NO","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework Speech","PRODUCT_NAME":"path_provider_foundation_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e980f1ae418d2bbf8ce0c0848ff9e9d99f1","name":"Debug"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e98aed8c561cc665a91c70adde791d8e192","buildSettings":{"CLANG_ENABLE_OBJC_WEAK":"NO","CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/path_provider_foundation","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"path_provider_foundation","INFOPLIST_FILE":"Target Support Files/path_provider_foundation/ResourceBundle-path_provider_foundation_privacy-path_provider_foundation-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework 
Speech","PRODUCT_NAME":"path_provider_foundation_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","VALIDATE_PRODUCT":"YES","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e98fbefabc0b715bbc6bf42d5c546a088eb","name":"Profile"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e98aed8c561cc665a91c70adde791d8e192","buildSettings":{"CODE_SIGNING_ALLOWED":"NO","CODE_SIGNING_IDENTITY":"-","CODE_SIGNING_REQUIRED":"NO","CONFIGURATION_BUILD_DIR":"$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)/path_provider_foundation","ENABLE_BITCODE":"NO","EXPANDED_CODE_SIGN_IDENTITY":"-","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IBSC_MODULE":"path_provider_foundation","INFOPLIST_FILE":"Target Support Files/path_provider_foundation/ResourceBundle-path_provider_foundation_privacy-path_provider_foundation-Info.plist","IPHONEOS_DEPLOYMENT_TARGET":"13.0","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework 
Speech","PRODUCT_NAME":"path_provider_foundation_privacy","SDKROOT":"iphoneos","SKIP_INSTALL":"YES","TARGETED_DEVICE_FAMILY":"1,2","WRAPPER_EXTENSION":"bundle"},"guid":"bfdfe7dc352907fc980b868725387e98b6b192a2f6d1562adae81a957dd2959b","name":"Release"}],"buildPhases":[{"buildFiles":[],"guid":"bfdfe7dc352907fc980b868725387e98cb74bc54605fe2f3d0ea9f0289a8b241","type":"com.apple.buildphase.sources"},{"buildFiles":[],"guid":"bfdfe7dc352907fc980b868725387e9881eaa5d02bc8b82d2651fdd2f994e058","type":"com.apple.buildphase.frameworks"},{"buildFiles":[{"fileReference":"bfdfe7dc352907fc980b868725387e9817004907bf7c7d94734b13afa4b26c8b","guid":"bfdfe7dc352907fc980b868725387e985e64de23d5b31c171c50ec6da5267645"}],"guid":"bfdfe7dc352907fc980b868725387e98c72ecd8a391cdbdaacff87f6f0d76da8","type":"com.apple.buildphase.resources"}],"buildRules":[],"dependencies":[],"guid":"bfdfe7dc352907fc980b868725387e987ea64ee8d53085bf9edd1a57aaf8cbb5","name":"path_provider_foundation-path_provider_foundation_privacy","productReference":{"guid":"bfdfe7dc352907fc980b868725387e986e649604f74c414a7c2dbe5ef4cc4e75","name":"path_provider_foundation_privacy.bundle","type":"product"},"productTypeIdentifier":"com.apple.product-type.bundle","provisioningSourceData":[{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Debug","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Profile","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"${PRODUCT_BUNDLE_IDENTIFIER}","configurationName":"Release","provisioningStyle":0}],"type":"standard"}

View File

@ -1 +0,0 @@
{"buildConfigurations":[{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e983b4389e41a26a4fc852e2feb08c32b31","buildSettings":{"ASSETCATALOG_COMPILER_APPICON_NAME":"AppIcon","ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME":"AccentColor","CLANG_ENABLE_OBJC_WEAK":"NO","ENABLE_BITCODE":"NO","ENABLE_USER_SCRIPT_SANDBOXING":"NO","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IPHONEOS_DEPLOYMENT_TARGET":"13.0","LD_RUNPATH_SEARCH_PATHS":"$(inherited) @executable_path/Frameworks","ONLY_ACTIVE_ARCH":"NO","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework Speech","SDKROOT":"iphoneos","TARGETED_DEVICE_FAMILY":"1,2"},"guid":"bfdfe7dc352907fc980b868725387e982cf0da236cf10d087750aa1434da9227","name":"Debug"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e980eb0dfc45486a9a195a5a19657d4bdde","buildSettings":{"ASSETCATALOG_COMPILER_APPICON_NAME":"AppIcon","ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME":"AccentColor","CLANG_ENABLE_OBJC_WEAK":"NO","ENABLE_BITCODE":"NO","ENABLE_USER_SCRIPT_SANDBOXING":"NO","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 PERMISSION_SPEECH_RECOGNIZER=1","IPHONEOS_DEPLOYMENT_TARGET":"13.0","LD_RUNPATH_SEARCH_PATHS":"$(inherited) @executable_path/Frameworks","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework Speech","SDKROOT":"iphoneos","TARGETED_DEVICE_FAMILY":"1,2","VALIDATE_PRODUCT":"YES"},"guid":"bfdfe7dc352907fc980b868725387e98cc28f154213fd8181aa70d4c188a8335","name":"Profile"},{"baseConfigurationFileReference":"bfdfe7dc352907fc980b868725387e980eb0dfc45486a9a195a5a19657d4bdde","buildSettings":{"ASSETCATALOG_COMPILER_APPICON_NAME":"AppIcon","ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME":"AccentColor","CLANG_ENABLE_OBJC_WEAK":"NO","ENABLE_BITCODE":"NO","ENABLE_USER_SCRIPT_SANDBOXING":"NO","GCC_PREPROCESSOR_DEFINITIONS":"$(inherited) PERMISSION_MICROPHONE=1 
PERMISSION_SPEECH_RECOGNIZER=1","IPHONEOS_DEPLOYMENT_TARGET":"13.0","LD_RUNPATH_SEARCH_PATHS":"$(inherited) @executable_path/Frameworks","OTHER_LDFLAGS":"$(inherited) -framework AudioToolbox -framework AVFoundation -framework Speech","SDKROOT":"iphoneos","TARGETED_DEVICE_FAMILY":"1,2","VALIDATE_PRODUCT":"YES"},"guid":"bfdfe7dc352907fc980b868725387e981f19fefc6e52ad9e4e005a2248234387","name":"Release"}],"buildPhases":[],"buildRules":[],"dependencies":[],"guid":"bfdfe7dc352907fc980b868725387e989da425bb6d6d5d8dbb95e4afffb82217","name":"Flutter","provisioningSourceData":[{"bundleIdentifierFromInfoPlist":"","configurationName":"Debug","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"","configurationName":"Profile","provisioningStyle":0},{"bundleIdentifierFromInfoPlist":"","configurationName":"Release","provisioningStyle":0}],"type":"aggregate"}

View File

@ -1 +0,0 @@
{"guid":"dc4b70c03e8043e50e38f2068887b1d4","name":"Pods","path":"/Users/max/SourceCode/yuanxuan/yx_speech_to_text_flutter/example/ios/Pods/Pods.xcodeproj/project.xcworkspace","projects":["PROJECT@v11_mod=7f6a4f64a6e2fd3720afe00c8dd0402f_hash=bfdfe7dc352907fc980b868725387e98plugins=1OJSG6M1FOV3XYQCBH7Z29RZ0FPR9XDE1"]}

View File

@ -9,6 +9,8 @@ PODS:
- Flutter
- sherpa_onnx_ios (1.12.10):
- Flutter
- yx_asr (1.0.0):
- Flutter
DEPENDENCIES:
- Flutter (from `Flutter`)
@ -16,6 +18,7 @@ DEPENDENCIES:
- permission_handler_apple (from `.symlinks/plugins/permission_handler_apple/ios`)
- record_ios (from `.symlinks/plugins/record_ios/ios`)
- sherpa_onnx_ios (from `.symlinks/plugins/sherpa_onnx_ios/ios`)
- yx_asr (from `.symlinks/plugins/yx_asr/ios`)
EXTERNAL SOURCES:
Flutter:
@ -28,6 +31,8 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/record_ios/ios"
sherpa_onnx_ios:
:path: ".symlinks/plugins/sherpa_onnx_ios/ios"
yx_asr:
:path: ".symlinks/plugins/yx_asr/ios"
SPEC CHECKSUMS:
Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7
@ -35,6 +40,7 @@ SPEC CHECKSUMS:
permission_handler_apple: 9878588469a2b0d0fc1e048d9f43605f92e6cec2
record_ios: 840d21cce013c5a3b2168b74a54ebdb4136359e2
sherpa_onnx_ios: beff6b0480ef1650c556535c09a7d3784cc64fa0
yx_asr: 47dfabe783600648f995e554df34b80e30b06427
PODFILE CHECKSUM: 813275cc5054f5d40f29aae9cbb2da0d9903deba

View File

@ -507,7 +507,7 @@ packages:
path: ".."
relative: true
source: path
version: "1.0.0"
version: "1.0.2"
sdks:
dart: ">=3.7.0 <4.0.0"
flutter: ">=3.29.0"

View File

@ -12,19 +12,19 @@ import 'package:yx_asr_example/main.dart';
// NOTE(review): this hunk shows BOTH the original assertions and their
// commented-out replacements (the diff viewer renders removed and added
// lines together). The template counter test presumably no longer matches
// the example app's UI — TODO confirm against example/lib/main.dart.
void main() {
  testWidgets('Counter increments smoke test', (WidgetTester tester) async {
    // Build our app and trigger a frame.
    await tester.pumpWidget(const MyApp());
    // // Build our app and trigger a frame.
    // await tester.pumpWidget(const MyApp());
    // Verify that our counter starts at 0.
    expect(find.text('0'), findsOneWidget);
    expect(find.text('1'), findsNothing);
    // // Verify that our counter starts at 0.
    // expect(find.text('0'), findsOneWidget);
    // expect(find.text('1'), findsNothing);
    // Tap the '+' icon and trigger a frame.
    await tester.tap(find.byIcon(Icons.add));
    await tester.pump();
    // // Tap the '+' icon and trigger a frame.
    // await tester.tap(find.byIcon(Icons.add));
    // await tester.pump();
    // Verify that our counter has incremented.
    expect(find.text('0'), findsNothing);
    expect(find.text('1'), findsOneWidget);
    // // Verify that our counter has incremented.
    // expect(find.text('0'), findsNothing);
    // expect(find.text('1'), findsOneWidget);
  });
}

View File

@ -1,4 +0,0 @@
#import <Flutter/Flutter.h>
// Objective-C registration entry point for the yx_asr plugin.
// The actual behavior lives in the Swift YxAsrPlugin class; this header only
// exposes the FlutterPlugin conformance to the generated plugin registrant.
@interface YxAsrPlugin : NSObject<FlutterPlugin>
@end

View File

@ -1,15 +0,0 @@
#import "YxAsrPlugin.h"
#if __has_include(<yx_asr/yx_asr-Swift.h>)
#import <yx_asr/yx_asr-Swift.h>
#else
// Support project import fallback if the generated compatibility header
// is not copied when this plugin is created as a library.
// https://forums.swift.org/t/swift-static-libraries-dont-copy-generated-objective-c-header/19816
#import "yx_asr-Swift.h"
#endif
@implementation YxAsrPlugin
// Forwards Flutter's ObjC registration call to the Swift implementation
// (imported above via the generated yx_asr-Swift.h compatibility header).
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
  [YxAsrPlugin register:registrar];
}
@end

View File

@ -1,346 +0,0 @@
import Flutter
import UIKit
import Speech
import AVFoundation
/// iOS implementation of the `yx_asr` speech-to-text plugin.
///
/// Bridges the "yx_asr" method channel to Apple's Speech framework
/// (`SFSpeechRecognizer`) and an `AVAudioEngine` microphone tap, and pushes
/// recognition results, errors, and listening-status changes to Dart over
/// three dedicated event channels ("yx_asr/results", "yx_asr/errors",
/// "yx_asr/status").
public class YxAsrPlugin: NSObject, FlutterPlugin {
    // Channels created during registration; retained for the plugin's lifetime.
    private var channel: FlutterMethodChannel?
    private var resultEventChannel: FlutterEventChannel?
    private var errorEventChannel: FlutterEventChannel?
    private var statusEventChannel: FlutterEventChannel?
    // Event sinks; set/cleared by the stream handlers when Dart (un)subscribes,
    // so each may be nil at any time.
    private var resultEventSink: FlutterEventSink?
    private var errorEventSink: FlutterEventSink?
    private var statusEventSink: FlutterEventSink?
    // Per-session recognition state; non-nil only while a session is active.
    private var speechRecognizer: SFSpeechRecognizer?
    private var recognitionRequest: SFSpeechAudioBufferRecognitionRequest?
    private var recognitionTask: SFSpeechRecognitionTask?
    private var audioEngine: AVAudioEngine?
    private var isListening = false

    /// Flutter registration hook: wires the method channel plus the three
    /// event channels, each backed by its own stream-handler wrapper.
    public static func register(with registrar: FlutterPluginRegistrar) {
        let instance = YxAsrPlugin()
        let channel = FlutterMethodChannel(name: "yx_asr", binaryMessenger: registrar.messenger())
        instance.channel = channel
        registrar.addMethodCallDelegate(instance, channel: channel)
        let resultEventChannel = FlutterEventChannel(name: "yx_asr/results", binaryMessenger: registrar.messenger())
        instance.resultEventChannel = resultEventChannel
        resultEventChannel.setStreamHandler(ResultStreamHandler(plugin: instance))
        let errorEventChannel = FlutterEventChannel(name: "yx_asr/errors", binaryMessenger: registrar.messenger())
        instance.errorEventChannel = errorEventChannel
        errorEventChannel.setStreamHandler(ErrorStreamHandler(plugin: instance))
        let statusEventChannel = FlutterEventChannel(name: "yx_asr/status", binaryMessenger: registrar.messenger())
        instance.statusEventChannel = statusEventChannel
        statusEventChannel.setStreamHandler(StatusStreamHandler(plugin: instance))
    }

    /// Dispatches incoming method-channel calls; unknown methods report
    /// FlutterMethodNotImplemented back to Dart.
    public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
        switch call.method {
        case "isAvailable":
            result(SFSpeechRecognizer.supportedLocales().count > 0)
        case "hasPermission":
            result(hasPermission())
        case "requestPermission":
            requestPermission(result: result)
        case "startListening":
            // Optional arguments with defaults: en-US locale, partial results
            // on, server-side (non-on-device) recognition.
            let arguments = call.arguments as? [String: Any] ?? [:]
            let localeId = arguments["localeId"] as? String ?? "en-US"
            let partialResults = arguments["partialResults"] as? Bool ?? true
            let onDevice = arguments["onDevice"] as? Bool ?? false
            startListening(localeId: localeId, partialResults: partialResults, onDevice: onDevice, result: result)
        case "stopListening":
            stopListening(result: result)
        case "cancel":
            cancel(result: result)
        case "isListening":
            result(isListening)
        default:
            result(FlutterMethodNotImplemented)
        }
    }

    /// True only when BOTH speech-recognition and microphone permissions are
    /// already granted.
    private func hasPermission() -> Bool {
        let speechStatus = SFSpeechRecognizer.authorizationStatus()
        let microphoneStatus = AVAudioSession.sharedInstance().recordPermission
        return speechStatus == .authorized && microphoneStatus == .granted
    }

    /// Requests speech + microphone permissions concurrently and reports the
    /// combined outcome once both callbacks have fired (DispatchGroup).
    private func requestPermission(result: @escaping FlutterResult) {
        if hasPermission() {
            result(true)
            return
        }
        var speechPermissionGranted = false
        var microphonePermissionGranted = false
        let group = DispatchGroup()
        // Request speech recognition permission
        group.enter()
        SFSpeechRecognizer.requestAuthorization { status in
            speechPermissionGranted = status == .authorized
            group.leave()
        }
        // Request microphone permission
        group.enter()
        AVAudioSession.sharedInstance().requestRecordPermission { granted in
            microphonePermissionGranted = granted
            group.leave()
        }
        group.notify(queue: .main) {
            result(speechPermissionGranted && microphonePermissionGranted)
        }
    }

    /// Starts a listening session. No-op (successful nil result) if already
    /// listening; fails fast with PERMISSION_DENIED when permissions are
    /// missing, mirroring the error onto the error event channel.
    private func startListening(localeId: String, partialResults: Bool, onDevice: Bool, result: @escaping FlutterResult) {
        guard hasPermission() else {
            sendError(errorType: "permissionDenied", errorMsg: "Speech recognition permission not granted", errorCode: nil)
            result(FlutterError(code: "PERMISSION_DENIED", message: "Speech recognition permission not granted", details: nil))
            return
        }
        if isListening {
            result(nil)
            return
        }
        do {
            try startRecognition(localeId: localeId, partialResults: partialResults, onDevice: onDevice)
            isListening = true
            statusEventSink?(true)
            result(nil)
        } catch {
            sendError(errorType: "service", errorMsg: "Failed to start speech recognition: \(error.localizedDescription)", errorCode: nil)
            result(FlutterError(code: "START_FAILED", message: "Failed to start speech recognition", details: error.localizedDescription))
        }
    }

    /// Builds the full recognition pipeline: audio session -> recognizer for
    /// the requested locale -> buffer request -> mic tap feeding the request
    /// -> recognition task streaming results back. Throws if any stage
    /// cannot be created.
    private func startRecognition(localeId: String, partialResults: Bool, onDevice: Bool) throws {
        // Cancel any previous task
        recognitionTask?.cancel()
        recognitionTask = nil
        // Configure audio session
        let audioSession = AVAudioSession.sharedInstance()
        try audioSession.setCategory(.record, mode: .measurement, options: .duckOthers)
        try audioSession.setActive(true, options: .notifyOthersOnDeactivation)
        // Create speech recognizer
        let locale = Locale(identifier: localeId)
        speechRecognizer = SFSpeechRecognizer(locale: locale)
        guard let speechRecognizer = speechRecognizer, speechRecognizer.isAvailable else {
            throw NSError(domain: "YxAsrPlugin", code: 1, userInfo: [NSLocalizedDescriptionKey: "Speech recognizer not available"])
        }
        // Create recognition request
        recognitionRequest = SFSpeechAudioBufferRecognitionRequest()
        guard let recognitionRequest = recognitionRequest else {
            throw NSError(domain: "YxAsrPlugin", code: 2, userInfo: [NSLocalizedDescriptionKey: "Unable to create recognition request"])
        }
        recognitionRequest.shouldReportPartialResults = partialResults
        // On-device recognition is only available from iOS 13.
        if #available(iOS 13.0, *) {
            recognitionRequest.requiresOnDeviceRecognition = onDevice
        }
        // Create audio engine
        audioEngine = AVAudioEngine()
        guard let audioEngine = audioEngine else {
            throw NSError(domain: "YxAsrPlugin", code: 3, userInfo: [NSLocalizedDescriptionKey: "Unable to create audio engine"])
        }
        let inputNode = audioEngine.inputNode
        let recordingFormat = inputNode.outputFormat(forBus: 0)
        // Pump microphone buffers into the recognition request.
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: recordingFormat) { buffer, _ in
            recognitionRequest.append(buffer)
        }
        audioEngine.prepare()
        try audioEngine.start()
        // Start recognition task
        recognitionTask = speechRecognizer.recognitionTask(with: recognitionRequest) { [weak self] result, error in
            guard let self = self else { return }
            if let result = result {
                let recognizedText = result.bestTranscription.formattedString
                // Confidence of the first segment only; 0.0 when unavailable.
                let confidence = result.bestTranscription.segments.first?.confidence ?? 0.0
                let alternatives = result.transcriptions.dropFirst().map { $0.formattedString }
                self.sendResult(
                    recognizedWords: recognizedText,
                    finalResult: result.isFinal,
                    confidence: Double(confidence),
                    alternatives: Array(alternatives)
                )
                if result.isFinal {
                    self.cleanup()
                }
            }
            if let error = error {
                self.handleRecognitionError(error)
            }
        }
    }

    /// Graceful stop: ends the audio stream so the task can deliver a final
    /// result (which triggers cleanup in the task callback).
    private func stopListening(result: @escaping FlutterResult) {
        recognitionRequest?.endAudio()
        result(nil)
    }

    /// Hard stop: tears the session down immediately without a final result.
    private func cancel(result: @escaping FlutterResult) {
        cleanup()
        result(nil)
    }

    /// Tears down the task, request, and audio engine, flips the listening
    /// status (notifying Dart), and deactivates the shared audio session.
    private func cleanup() {
        recognitionTask?.cancel()
        recognitionTask = nil
        recognitionRequest = nil
        audioEngine?.stop()
        audioEngine?.inputNode.removeTap(onBus: 0)
        audioEngine = nil
        isListening = false
        statusEventSink?(false)
        do {
            try AVAudioSession.sharedInstance().setActive(false, options: .notifyOthersOnDeactivation)
        } catch {
            print("Error deactivating audio session: \(error)")
        }
    }

    /// Maps Speech-framework NSError codes to the plugin's string error types,
    /// forwards them on the error channel, and tears the session down.
    private func handleRecognitionError(_ error: Error) {
        let nsError = error as NSError
        let errorType: String
        let errorMsg: String
        switch nsError.code {
        case 1700: // kLSRErrorCodeNoSpeechDetected
            errorType = "noSpeech"
            errorMsg = "No speech detected"
        case 1101: // kLSRErrorCodeAudioReadFailed
            errorType = "audio"
            errorMsg = "Audio read failed"
        case 1110: // kLSRErrorCodeUndefinedTemplateClassName
            errorType = "service"
            errorMsg = "Speech recognition service error"
        default:
            errorType = "unknown"
            errorMsg = error.localizedDescription
        }
        sendError(errorType: errorType, errorMsg: errorMsg, errorCode: String(nsError.code))
        cleanup()
    }

    /// Emits one recognition result map on the results event channel.
    private func sendResult(recognizedWords: String, finalResult: Bool, confidence: Double, alternatives: [String]) {
        let result: [String: Any] = [
            "recognizedWords": recognizedWords,
            "finalResult": finalResult,
            "confidence": confidence,
            "alternatives": alternatives
        ]
        resultEventSink?(result)
    }

    /// Emits one error map on the errors event channel (errorCode may be nil).
    private func sendError(errorType: String, errorMsg: String, errorCode: String?) {
        let error: [String: Any?] = [
            "errorType": errorType,
            "errorMsg": errorMsg,
            "errorCode": errorCode
        ]
        errorEventSink?(error)
    }

    // Sink setters used by the stream handlers below; nil means "no Dart
    // subscriber", which silently drops events via optional chaining.
    func setResultEventSink(_ eventSink: FlutterEventSink?) {
        resultEventSink = eventSink
    }

    func setErrorEventSink(_ eventSink: FlutterEventSink?) {
        errorEventSink = eventSink
    }

    func setStatusEventSink(_ eventSink: FlutterEventSink?) {
        statusEventSink = eventSink
    }
}
/// Connects the "yx_asr/results" event channel to the plugin: installs the
/// Dart-side sink on subscribe and clears it on cancel.
class ResultStreamHandler: NSObject, FlutterStreamHandler {
    private weak var owner: YxAsrPlugin?

    init(plugin: YxAsrPlugin) {
        owner = plugin
    }

    func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? {
        owner?.setResultEventSink(events)
        return nil
    }

    func onCancel(withArguments arguments: Any?) -> FlutterError? {
        owner?.setResultEventSink(nil)
        return nil
    }
}
/// Connects the "yx_asr/errors" event channel to the plugin: installs the
/// Dart-side sink on subscribe and clears it on cancel.
class ErrorStreamHandler: NSObject, FlutterStreamHandler {
    private weak var owner: YxAsrPlugin?

    init(plugin: YxAsrPlugin) {
        owner = plugin
    }

    func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? {
        owner?.setErrorEventSink(events)
        return nil
    }

    func onCancel(withArguments arguments: Any?) -> FlutterError? {
        owner?.setErrorEventSink(nil)
        return nil
    }
}
/// Connects the "yx_asr/status" event channel to the plugin: installs the
/// Dart-side sink on subscribe and clears it on cancel.
class StatusStreamHandler: NSObject, FlutterStreamHandler {
    private weak var owner: YxAsrPlugin?

    init(plugin: YxAsrPlugin) {
        owner = plugin
    }

    func onListen(withArguments arguments: Any?, eventSink events: @escaping FlutterEventSink) -> FlutterError? {
        owner?.setStatusEventSink(events)
        return nil
    }

    func onCancel(withArguments arguments: Any?) -> FlutterError? {
        owner?.setStatusEventSink(nil)
        return nil
    }
}

View File

@ -1,19 +0,0 @@
//
// Generated file. Do not edit.
//
// clang-format off
#ifndef GeneratedPluginRegistrant_h
#define GeneratedPluginRegistrant_h
#import <Flutter/Flutter.h>
NS_ASSUME_NONNULL_BEGIN
// Auto-generated by the Flutter tool; regenerated on build. Registers every
// plugin in this project with the given registry — do not edit by hand.
@interface GeneratedPluginRegistrant : NSObject
+ (void)registerWithRegistry:(NSObject<FlutterPluginRegistry>*)registry;
@end
NS_ASSUME_NONNULL_END
#endif /* GeneratedPluginRegistrant_h */

View File

@ -1,42 +0,0 @@
//
// Generated file. Do not edit.
//
// clang-format off
#import "GeneratedPluginRegistrant.h"
// Each plugin header is imported via angle brackets when available, falling
// back to @import modules otherwise (standard Flutter-generated pattern).
#if __has_include(<integration_test/IntegrationTestPlugin.h>)
#import <integration_test/IntegrationTestPlugin.h>
#else
@import integration_test;
#endif
#if __has_include(<path_provider_foundation/PathProviderPlugin.h>)
#import <path_provider_foundation/PathProviderPlugin.h>
#else
@import path_provider_foundation;
#endif
#if __has_include(<permission_handler_apple/PermissionHandlerPlugin.h>)
#import <permission_handler_apple/PermissionHandlerPlugin.h>
#else
@import permission_handler_apple;
#endif
#if __has_include(<record_ios/RecordIosPlugin.h>)
#import <record_ios/RecordIosPlugin.h>
#else
@import record_ios;
#endif
@implementation GeneratedPluginRegistrant
// Auto-generated: registers each bundled plugin once with the engine registry.
// Regenerated by the Flutter tool — do not edit by hand.
+ (void)registerWithRegistry:(NSObject<FlutterPluginRegistry>*)registry {
  [IntegrationTestPlugin registerWithRegistrar:[registry registrarForPlugin:@"IntegrationTestPlugin"]];
  [PathProviderPlugin registerWithRegistrar:[registry registrarForPlugin:@"PathProviderPlugin"]];
  [PermissionHandlerPlugin registerWithRegistrar:[registry registrarForPlugin:@"PermissionHandlerPlugin"]];
  [RecordIosPlugin registerWithRegistrar:[registry registrarForPlugin:@"RecordIosPlugin"]];
}
@end

View File

@ -1,19 +0,0 @@
# CocoaPods spec for the iOS side of the yx_asr Flutter plugin.
Pod::Spec.new do |s|
  s.name             = 'yx_asr'
  # Keep in lockstep with `version:` in pubspec.yaml (package is at 1.0.2;
  # this previously lagged at 1.0.0).
  s.version          = '1.0.2'
  s.summary          = 'A Flutter plugin for speech-to-text (ASR) functionality.'
  s.description      = <<-DESC
A Flutter plugin for speech-to-text (ASR) functionality with real-time recognition support.
                       DESC
  s.homepage         = 'https://github.com/yuanxuan/yx_asr'
  s.license          = { :file => '../LICENSE' }
  s.author           = { 'Yuanxuan' => 'contact@yuanxuan.com' }
  s.source           = { :path => '.' }
  s.source_files     = 'Classes/**/*'
  s.dependency 'Flutter'
  # iOS 13+ is required (the Swift implementation gates on-device recognition
  # behind an iOS 13 availability check).
  s.platform = :ios, '13.0'

  # Flutter.framework does not contain a i386 slice.
  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386' }
  s.swift_version = '5.0'
end

View File

@ -24,10 +24,3 @@ dev_dependencies:
flutter_lints: ^3.0.0
flutter:
plugin:
platforms:
android:
package: com.yuanxuan.yx_asr
pluginClass: YxAsrPlugin
ios:
pluginClass: YxAsrPlugin