From fb8c77afa47f21d53b034cc7caf93486301de1dc Mon Sep 17 00:00:00 2001 From: Amjad Amireh Date: Wed, 15 Jul 2020 12:51:39 +0300 Subject: [PATCH] add speech_to_text plugin --- pubspec.lock | 14 + pubspec.yaml | 4 + speech_to_text/.github/workflows/master.yml | 19 + speech_to_text/.gitignore | 11 + speech_to_text/.metadata | 10 + speech_to_text/CHANGELOG.md | 166 +++++ speech_to_text/LICENSE | 29 + speech_to_text/README.md | 150 +++++ speech_to_text/android/.classpath | 6 + speech_to_text/android/.gitignore | 8 + speech_to_text/android/.project | 23 + .../org.eclipse.buildship.core.prefs | 13 + speech_to_text/android/build.gradle | 44 ++ .../android/gradle/gradle.properties | 3 + .../android/gradle/local.properties | 2 + speech_to_text/android/gradle/settings.gradle | 1 + .../gradle/wrapper/gradle-wrapper.properties | 5 + .../android/src/main/AndroidManifest.xml | 3 + .../speech_to_text/SpeechToTextPlugin.kt | 595 ++++++++++++++++++ speech_to_text/example/.gitignore | 73 +++ speech_to_text/example/.metadata | 10 + speech_to_text/example/README.md | 155 +++++ speech_to_text/example/android/.project | 17 + .../org.eclipse.buildship.core.prefs | 2 + .../example/android/app/build.gradle | 67 ++ .../android/app/src/debug/AndroidManifest.xml | 7 + .../android/app/src/main/AndroidManifest.xml | 32 + .../speech_to_text_example/MainActivity.kt | 12 + .../main/res/drawable/launch_background.xml | 12 + .../src/main/res/mipmap-hdpi/ic_launcher.png | Bin 0 -> 544 bytes .../src/main/res/mipmap-mdpi/ic_launcher.png | Bin 0 -> 442 bytes .../src/main/res/mipmap-xhdpi/ic_launcher.png | Bin 0 -> 721 bytes .../main/res/mipmap-xxhdpi/ic_launcher.png | Bin 0 -> 1031 bytes .../main/res/mipmap-xxxhdpi/ic_launcher.png | Bin 0 -> 1443 bytes .../app/src/main/res/values/styles.xml | 8 + .../app/src/profile/AndroidManifest.xml | 7 + speech_to_text/example/android/build.gradle | 31 + .../example/android/gradle.properties | 4 + .../gradle/wrapper/gradle-wrapper.properties | 6 + 
.../example/android/settings.gradle | 15 + .../assets/sounds/speech_to_text_cancel.m4r | Bin 0 -> 14006 bytes .../sounds/speech_to_text_listening.m4r | Bin 0 -> 16416 bytes .../assets/sounds/speech_to_text_stop.m4r | Bin 0 -> 17128 bytes .../ios/Flutter/AppFrameworkInfo.plist | 26 + .../example/ios/Flutter/Debug.xcconfig | 2 + .../example/ios/Flutter/Flutter.podspec | 18 + .../example/ios/Flutter/Release.xcconfig | 2 + speech_to_text/example/ios/Podfile | 90 +++ speech_to_text/example/ios/Podfile.lock | 29 + .../ios/Runner.xcodeproj/project.pbxproj | 578 +++++++++++++++++ .../contents.xcworkspacedata | 7 + .../xcshareddata/xcschemes/Runner.xcscheme | 91 +++ .../contents.xcworkspacedata | 10 + .../xcshareddata/IDEWorkspaceChecks.plist | 8 + .../example/ios/Runner/AppDelegate.swift | 13 + .../AppIcon.appiconset/Contents.json | 122 ++++ .../Icon-App-1024x1024@1x.png | Bin 0 -> 10932 bytes .../AppIcon.appiconset/Icon-App-20x20@1x.png | Bin 0 -> 564 bytes .../AppIcon.appiconset/Icon-App-20x20@2x.png | Bin 0 -> 1283 bytes .../AppIcon.appiconset/Icon-App-20x20@3x.png | Bin 0 -> 1588 bytes .../AppIcon.appiconset/Icon-App-29x29@1x.png | Bin 0 -> 1025 bytes .../AppIcon.appiconset/Icon-App-29x29@2x.png | Bin 0 -> 1716 bytes .../AppIcon.appiconset/Icon-App-29x29@3x.png | Bin 0 -> 1920 bytes .../AppIcon.appiconset/Icon-App-40x40@1x.png | Bin 0 -> 1283 bytes .../AppIcon.appiconset/Icon-App-40x40@2x.png | Bin 0 -> 1895 bytes .../AppIcon.appiconset/Icon-App-40x40@3x.png | Bin 0 -> 2665 bytes .../AppIcon.appiconset/Icon-App-60x60@2x.png | Bin 0 -> 2665 bytes .../AppIcon.appiconset/Icon-App-60x60@3x.png | Bin 0 -> 3831 bytes .../AppIcon.appiconset/Icon-App-76x76@1x.png | Bin 0 -> 1888 bytes .../AppIcon.appiconset/Icon-App-76x76@2x.png | Bin 0 -> 3294 bytes .../Icon-App-83.5x83.5@2x.png | Bin 0 -> 3612 bytes .../LaunchImage.imageset/Contents.json | 23 + .../LaunchImage.imageset/LaunchImage.png | Bin 0 -> 68 bytes .../LaunchImage.imageset/LaunchImage@2x.png | Bin 0 -> 68 bytes 
.../LaunchImage.imageset/LaunchImage@3x.png | Bin 0 -> 68 bytes .../LaunchImage.imageset/README.md | 5 + .../Runner/Base.lproj/LaunchScreen.storyboard | 37 ++ .../ios/Runner/Base.lproj/Main.storyboard | 26 + speech_to_text/example/ios/Runner/Info.plist | 49 ++ .../ios/Runner/Runner-Bridging-Header.h | 1 + speech_to_text/example/lib/main.dart | 275 ++++++++ speech_to_text/example/pubspec.lock | 245 ++++++++ speech_to_text/example/pubspec.yaml | 33 + speech_to_text/example/test/widget_test.dart | 27 + speech_to_text/ios/.gitignore | 37 ++ speech_to_text/ios/Assets/.gitkeep | 0 .../ios/Classes/SpeechToTextPlugin.h | 4 + .../ios/Classes/SpeechToTextPlugin.m | 8 + .../ios/Classes/SwiftSpeechToTextPlugin.swift | 580 +++++++++++++++++ speech_to_text/ios/speech_to_text.podspec | 22 + .../lib/speech_recognition_error.dart | 44 ++ .../lib/speech_recognition_error.g.dart | 22 + .../lib/speech_recognition_event.dart | 30 + .../lib/speech_recognition_result.dart | 140 +++++ .../lib/speech_recognition_result.g.dart | 41 ++ speech_to_text/lib/speech_to_text.dart | 511 +++++++++++++++ .../lib/speech_to_text_provider.dart | 200 ++++++ speech_to_text/pubspec.lock | 483 ++++++++++++++ speech_to_text/pubspec.yaml | 31 + .../test/speech_recognition_error_test.dart | 65 ++ .../test/speech_recognition_event_test.dart | 42 ++ .../test/speech_recognition_result_test.dart | 134 ++++ .../test/speech_recognitions_words_test.dart | 86 +++ .../test/speech_to_text_provider_test.dart | 196 ++++++ speech_to_text/test/speech_to_text_test.dart | 425 +++++++++++++ .../test/test_speech_channel_handler.dart | 134 ++++ speech_to_text/test/test_speech_listener.dart | 36 ++ 107 files changed, 6552 insertions(+) create mode 100644 speech_to_text/.github/workflows/master.yml create mode 100644 speech_to_text/.gitignore create mode 100644 speech_to_text/.metadata create mode 100644 speech_to_text/CHANGELOG.md create mode 100644 speech_to_text/LICENSE create mode 100644 speech_to_text/README.md create mode 
100644 speech_to_text/android/.classpath create mode 100644 speech_to_text/android/.gitignore create mode 100644 speech_to_text/android/.project create mode 100644 speech_to_text/android/.settings/org.eclipse.buildship.core.prefs create mode 100644 speech_to_text/android/build.gradle create mode 100644 speech_to_text/android/gradle/gradle.properties create mode 100644 speech_to_text/android/gradle/local.properties create mode 100644 speech_to_text/android/gradle/settings.gradle create mode 100644 speech_to_text/android/gradle/wrapper/gradle-wrapper.properties create mode 100644 speech_to_text/android/src/main/AndroidManifest.xml create mode 100644 speech_to_text/android/src/main/kotlin/com/csdcorp/speech_to_text/SpeechToTextPlugin.kt create mode 100644 speech_to_text/example/.gitignore create mode 100644 speech_to_text/example/.metadata create mode 100644 speech_to_text/example/README.md create mode 100644 speech_to_text/example/android/.project create mode 100644 speech_to_text/example/android/.settings/org.eclipse.buildship.core.prefs create mode 100644 speech_to_text/example/android/app/build.gradle create mode 100644 speech_to_text/example/android/app/src/debug/AndroidManifest.xml create mode 100644 speech_to_text/example/android/app/src/main/AndroidManifest.xml create mode 100644 speech_to_text/example/android/app/src/main/kotlin/com/csdcorp/speech_to_text_example/MainActivity.kt create mode 100644 speech_to_text/example/android/app/src/main/res/drawable/launch_background.xml create mode 100644 speech_to_text/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png create mode 100644 speech_to_text/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png create mode 100644 speech_to_text/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png create mode 100644 speech_to_text/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png create mode 100644 speech_to_text/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png create mode 
100644 speech_to_text/example/android/app/src/main/res/values/styles.xml create mode 100644 speech_to_text/example/android/app/src/profile/AndroidManifest.xml create mode 100644 speech_to_text/example/android/build.gradle create mode 100644 speech_to_text/example/android/gradle.properties create mode 100644 speech_to_text/example/android/gradle/wrapper/gradle-wrapper.properties create mode 100644 speech_to_text/example/android/settings.gradle create mode 100644 speech_to_text/example/assets/sounds/speech_to_text_cancel.m4r create mode 100644 speech_to_text/example/assets/sounds/speech_to_text_listening.m4r create mode 100644 speech_to_text/example/assets/sounds/speech_to_text_stop.m4r create mode 100644 speech_to_text/example/ios/Flutter/AppFrameworkInfo.plist create mode 100644 speech_to_text/example/ios/Flutter/Debug.xcconfig create mode 100644 speech_to_text/example/ios/Flutter/Flutter.podspec create mode 100644 speech_to_text/example/ios/Flutter/Release.xcconfig create mode 100644 speech_to_text/example/ios/Podfile create mode 100644 speech_to_text/example/ios/Podfile.lock create mode 100644 speech_to_text/example/ios/Runner.xcodeproj/project.pbxproj create mode 100644 speech_to_text/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata create mode 100644 speech_to_text/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme create mode 100644 speech_to_text/example/ios/Runner.xcworkspace/contents.xcworkspacedata create mode 100644 speech_to_text/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist create mode 100644 speech_to_text/example/ios/Runner/AppDelegate.swift create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@1x.png create mode 100644 
speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@2x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@1x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@3x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@1x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@2x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-83.5x83.5@2x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/Contents.json create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png create mode 100644 speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md create mode 100644 
speech_to_text/example/ios/Runner/Base.lproj/LaunchScreen.storyboard create mode 100644 speech_to_text/example/ios/Runner/Base.lproj/Main.storyboard create mode 100644 speech_to_text/example/ios/Runner/Info.plist create mode 100644 speech_to_text/example/ios/Runner/Runner-Bridging-Header.h create mode 100644 speech_to_text/example/lib/main.dart create mode 100644 speech_to_text/example/pubspec.lock create mode 100644 speech_to_text/example/pubspec.yaml create mode 100644 speech_to_text/example/test/widget_test.dart create mode 100644 speech_to_text/ios/.gitignore create mode 100644 speech_to_text/ios/Assets/.gitkeep create mode 100644 speech_to_text/ios/Classes/SpeechToTextPlugin.h create mode 100644 speech_to_text/ios/Classes/SpeechToTextPlugin.m create mode 100644 speech_to_text/ios/Classes/SwiftSpeechToTextPlugin.swift create mode 100644 speech_to_text/ios/speech_to_text.podspec create mode 100644 speech_to_text/lib/speech_recognition_error.dart create mode 100644 speech_to_text/lib/speech_recognition_error.g.dart create mode 100644 speech_to_text/lib/speech_recognition_event.dart create mode 100644 speech_to_text/lib/speech_recognition_result.dart create mode 100644 speech_to_text/lib/speech_recognition_result.g.dart create mode 100644 speech_to_text/lib/speech_to_text.dart create mode 100644 speech_to_text/lib/speech_to_text_provider.dart create mode 100644 speech_to_text/pubspec.lock create mode 100644 speech_to_text/pubspec.yaml create mode 100644 speech_to_text/test/speech_recognition_error_test.dart create mode 100644 speech_to_text/test/speech_recognition_event_test.dart create mode 100644 speech_to_text/test/speech_recognition_result_test.dart create mode 100644 speech_to_text/test/speech_recognitions_words_test.dart create mode 100644 speech_to_text/test/speech_to_text_provider_test.dart create mode 100644 speech_to_text/test/speech_to_text_test.dart create mode 100644 speech_to_text/test/test_speech_channel_handler.dart create mode 100644 
speech_to_text/test/test_speech_listener.dart diff --git a/pubspec.lock b/pubspec.lock index 71445e35..7c695999 100644 --- a/pubspec.lock +++ b/pubspec.lock @@ -155,6 +155,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "1.0.2" + clock: + dependency: transitive + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" code_builder: dependency: transitive description: @@ -635,6 +642,13 @@ packages: url: "https://pub.dartlang.org" source: hosted version: "1.7.0" + speech_to_text: + dependency: "direct main" + description: + path: speech_to_text + relative: true + source: path + version: "0.0.0" stack_trace: dependency: transitive description: diff --git a/pubspec.yaml b/pubspec.yaml index c6e998f2..15ea2dca 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -52,6 +52,10 @@ dependencies: #flutter_svg: ^0.17.4 percent_indicator: "^2.1.1" + #speech to text + speech_to_text: + path: speech_to_text + dev_dependencies: flutter_test: sdk: flutter diff --git a/speech_to_text/.github/workflows/master.yml b/speech_to_text/.github/workflows/master.yml new file mode 100644 index 00000000..4d4cff1c --- /dev/null +++ b/speech_to_text/.github/workflows/master.yml @@ -0,0 +1,19 @@ +name: build + +on: + push: + branches: + - master + +jobs: + test: + name: Test on Ubuntu + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v1 + - uses: subosito/flutter-action@v1.3.2 + with: + flutter-version: '1.17.1' + channel: 'stable' + - run: flutter pub get + - run: flutter test diff --git a/speech_to_text/.gitignore b/speech_to_text/.gitignore new file mode 100644 index 00000000..8969cbcd --- /dev/null +++ b/speech_to_text/.gitignore @@ -0,0 +1,11 @@ +.DS_Store +.dart_tool/ + +.packages +.pub/ + +build/ +coverage/ +example/.flutter-plugins-dependencies +**/ios/Flutter/flutter_export_environment.sh +android/.idea/ diff --git a/speech_to_text/.metadata b/speech_to_text/.metadata new file mode 100644 index 00000000..1940d996 --- 
/dev/null +++ b/speech_to_text/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. + +version: + revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f + channel: stable + +project_type: plugin diff --git a/speech_to_text/CHANGELOG.md b/speech_to_text/CHANGELOG.md new file mode 100644 index 00000000..477e110c --- /dev/null +++ b/speech_to_text/CHANGELOG.md @@ -0,0 +1,166 @@ +# Changelog + +## 2.3.0 + +### New + * new parameter `onDevice` on the `listen` method enforces on device recognition for sensitive content + * onSoundLevelChange now supported on iOS + * added compile troubleshooting help to README.md + * `SpeechToTextProvider` is an alternate and simpler way to interact with the `SpeechToText` plugin. + * new `provider_example.dart` example for usage of `SpeechToTextProvider`. +### Fix + * on iOS handles some conflicts with other applications better to keep speech working after calls for example + + +## 2.2.0 + +### New + * improved error handling and logging in the iOS implementation + * added general guides for iOS to the README + * moved stress testing out of the main example + * iOS now defaults to using the speaker rather than the receiver for start /stop sounds when no headphones +### Fix + * iOS now properly deactivates the audio session when no longer listening + * start and stop sounds on iOS should be more reliable when available + +## 2.1.0 +### Breaking + * `listenFor` now calls `stop` rather than `cancel` as this seems like more useful behaviour + +### Fix + * Android no longer stops or cancels the speech recognizer if it has already been shutdown by a + timeout or other platform behaviour. 
+ * Android no longer tries to restart the listener when it is already active + * Now properly notifies errors that happen after listening stops due to platform callback rather than + client request. See https://github.com/csdcorp/speech_to_text/issues/51 + +## 2.0.1 +### Fix + * Resolves an issue with the Android implementation not handling permission requests properly on apps + that didn't use the 1.12.x plugin APIs for registration. The permission dialog would not appear and + permission was denied. + + +## 2.0.0 + +### Breaking + + * Upgraded to New Swift 1.12 plugin structure, may work with older Flutter version but not guaranteed + +### New + + * the plugin now requests both speech and microphone permission on initialize on iOS + * added `debugLogging` parameter to the `initialize` method to control native logging + +### Fix + + * The Android implementation now blocks duplicate results notifications. It appears that at least on some + Android versions the final results notification onResults is notified twice when Android automatically + terminates the session due to a pause time. The de-duplication looks for successive notifications + with < 100 ms between them and blocks the second. If you miss any onResult notifications please post + an issue. + +## 1.1.0 + +### New + + * error_timeout has been separated into error_network_timeout and error_speech_timeout + +## 1.0.0 + +### New + * hasPermission to check for the current permission without bringing up the system dialog + * `listen` has a new optional `cancelOnError` parameter to support automatically canceling + a listening session on a permanent error. + * `listen` has a new optional `partialResults` parameter that controls whether the callback + receives partial or only final results. 
+ +## 0.8.0 + +### New + + * speech recognizer now exposes multiple possible transcriptions for each recognized speech + * alternates list on SpeechRecognitionResult exposes alternate transcriptions of voice + * confidence on SpeechRecognitionResult gives an estimate of confidence in the transcription + * isConfident on SpeechRecognitionResult supports testing confidence + * hasConfidenceRating on SpeechRecognitionResult indicates if confidence was provided from the device + * new SpeechRecognitionWords class gives details on per transcription words and confidence + +### Fix + + * speechRecognizer availabilityDidChange was crashing if invoked due to an invalid parameter type + * Added iOS platform 10 to example Podfile to resolve compilation warnings + +## 0.7.2 + +### Breaking + + * Upgrade Swift to version 5 to match Flutter. Projects using this plugin must now switch to 5. + +## 0.7.1 + +### Fix + + * Upgrade Kotlin to 1.3.5 to match the Flutter 1.12 version + * Upgrade Gradle build to 3.5.0 to match the Flutter 1.12 version + * Android version of the plugin was repeating the system default locale in the `locales` list + +## 0.7.0 + +### New + + * locales method returns the list of available languages for speech + * new optional localeId parameter on listen method supports choosing the comprehension language separately from the current system locale. + +### Breaking + + * `cancel` and `stop` are now async + +## 0.6.3 + +### Fix + + * request permission fix on Android to ensure it doesn't conflict with other requests + +## 0.6.2 + +### Fix + + * channel invoke wasn't being done on the main thread in iOS + +## 0.6.1 + +### Fix + + * listening sound was failing due to timing, now uses play and record mode on iOS. + + ## 0.6.0 +### Breaking + + * The filenames for the optional sounds for iOS have changed. + +### New + + * Added an optional listenFor parameter to set a max duration to listen for speech and then automatically cancel. 
+ +### Fix + + * Was failing to play sounds because of record mode. Now plays sounds before going into record mode and after coming out. + * Status listener was being ignored, now properly notifies on status changes. + +## 0.5.1 + * Fixes a problem where the recognizer left the AVAudioSession in record mode which meant that subsequent sounds couldn't be played. + +## 0.5.0 +Initial draft with limited functionality, supports: + * initializing speech recognition + * asking the user for permission if required + * listening for recognized speech + * canceling the current recognition session + * stopping the current recognition session +* Android and iOS 10+ support + +Missing: + * some error handling + * testing across multiple OS versions + * and more, to be discovered... diff --git a/speech_to_text/LICENSE b/speech_to_text/LICENSE new file mode 100644 index 00000000..7c3991c8 --- /dev/null +++ b/speech_to_text/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2019, Corner Software Development Corp. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/speech_to_text/README.md b/speech_to_text/README.md new file mode 100644 index 00000000..af726f0e --- /dev/null +++ b/speech_to_text/README.md @@ -0,0 +1,150 @@ +# speech_to_text + +[![pub package](https://img.shields.io/badge/pub-v2.3.0-blue)](https://pub.dartlang.org/packages/speech_to_text) [![build status](https://github.com/csdcorp/speech_to_text/workflows/build/badge.svg)](https://github.com/csdcorp/speech_to_text/actions?query=workflow%3Abuild) + +A library that exposes device specific speech recognition capability. + +This plugin contains a set of classes that make it easy to use the speech recognition +capabilities of the mobile device in Flutter. It supports both Android and iOS. The +target use cases for this library are commands and short phrases, not continuous spoken +conversion or always on listening. + +## Recent Updates + +The 2.3.0 version adds `SpeechToTextProvider` as a simpler way to interact with the plugin. Checkout +the new `provider_example.dart` for intended usage. + +The 2.2.0 version improves audio session handling and start / stop sound playback on iOS. + +*Note*: Feedback from any test devices is welcome. 
+ +## Using + +To recognize text from the microphone import the package and call the plugin, like so: + +```dart +import 'package:speech_to_text/speech_to_text.dart' as stt; + + stt.SpeechToText speech = stt.SpeechToText(); + bool available = await speech.initialize( onStatus: statusListener, onError: errorListener ); + if ( available ) { + speech.listen( onResult: resultListener ); + } + else { + print("The user has denied the use of speech recognition."); + } + // some time later... + speech.stop() +``` + +### Initialize once +The `initialize` method only needs to be called once per application session. After that `listen`, +`start`, `stop`, and `cancel` can be used to interact with the plugin. Subsequent calls to `initialize` +are ignored which is safe but does mean that the `onStatus` and `onError` callbacks cannot be reset after +the first call to `initialize`. For that reason there should be only one instance of the plugin per +application. The `SpeechToTextProvider` is one way to create a single instance and easily reuse it in +multiple widgets. + +## Permissions + +Applications using this plugin require user permissions. +### iOS + +Add the following keys to your _Info.plist_ file, located in `/ios/Runner/Info.plist`: + +* `NSSpeechRecognitionUsageDescription` - describe why your app uses speech recognition. This is called _Privacy - Speech Recognition Usage Description_ in the visual editor. +* `NSMicrophoneUsageDescription` - describe why your app needs access to the microphone. This is called _Privacy - Microphone Usage Description_ in the visual editor. + +### Android + +Add the record audio permission to your _AndroidManifest.xml_ file, located in `/android/app/src/main/AndroidManifest.xml`. + +* `android.permission.RECORD_AUDIO` - this permission is required for microphone access. +* `android.permission.INTERNET` - this permission is required because speech recognition may use remote services. 
+ +## Adding Sounds for iOS (optional) + +Android automatically plays system sounds when speech listening starts or stops but iOS does not. This plugin supports playing sounds to indicate listening status on iOS if sound files are available as assets in the application. To enable sounds in an application using this plugin add the sound files to the project and reference them in the assets section of the application `pubspec.yaml`. The location and filenames of the sound files must exactly match what +is shown below or they will not be found. The example application for the plugin shows the usage. *Note* These files should be very short as they delay +the start / end of the speech recognizer until the sound playback is complete. +```yaml + assets: + - assets/sounds/speech_to_text_listening.m4r + - assets/sounds/speech_to_text_cancel.m4r + - assets/sounds/speech_to_text_stop.m4r +``` +* `speech_to_text_listening.m4r` - played when the listen method is called. +* `speech_to_text_cancel.m4r` - played when the cancel method is called. +* `speech_to_text_stop.m4r` - played when the stop method is called. + +## Troubleshooting + +### SDK version error trying to compile for Android +``` +Manifest merger failed : uses-sdk:minSdkVersion 16 cannot be smaller than version 21 declared in library [:speech_to_text] +``` +The speech_to_text plugin requires at least Android SDK 21 because some of the speech functions in Android +were only introduced in that version. To fix this error you need to change the `build.gradle` entry to reflect +this version. 
Here's what the relevant part of that file looked like as of this writing: +``` + defaultConfig { + applicationId "com.example.app" + minSdkVersion 21 + targetSdkVersion 28 + versionCode flutterVersionCode.toInteger() + versionName flutterVersionName + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } +``` + +### Incorrect Swift version trying to compile for iOS +``` +/Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:224:44: error: value of type 'SwiftSpeechToTextPlugin' has no member 'AVAudioSession' + rememberedAudioCategory = self.AVAudioSession.Category + ~~~~ ^~~~~~~~~~~~~~ + /Users/markvandergon/flutter/.pub-cache/hosted/pub.dartlang.org/speech_to_text-1.1.0/ios/Classes/SwiftSpeechToTextPlugin.swift:227:63: error: type 'Int' has no member 'notifyOthersOnDeactivation' + try self.audioSession.setActive(true, withFlags: .notifyOthersOnDeactivation) +``` +This happens when the Swift language version is not set correctly. See this thread for help https://github.com/csdcorp/speech_to_text/issues/45. + +### Swift not supported trying to compile for iOS +``` +`speech_to_text` does not specify a Swift version and none of the targets (`Runner`) integrating it have the `SWIFT_VERSION` attribute set. +``` +This usually happens for older projects that only support Objective-C. See this thread for help https://github.com/csdcorp/speech_to_text/issues/88. + +### Not working on a particular Android device +The symptom for this issue is that the `initialize` method will always fail. If you turn on debug logging +using the `debugLogging: true` flag on the `initialize` method you'll see `'Speech recognition unavailable'` +in the Android log. There's a lengthy issue discussion here https://github.com/csdcorp/speech_to_text/issues/36 +about this. The issue seems to be that the recognizer is not always automatically enabled on the device. 
Two +key things helped resolve the issue in this case at least. + +#### First +1. Go to Google Play +2. Search for 'Google' +3. You should find this app: https://play.google.com/store/apps/details?id=com.google.android.googlequicksearchbox +If 'Disabled' enable it + +This is the SO post that helped: https://stackoverflow.com/questions/28769320/how-to-check-wether-speech-recognition-is-available-or-not + +#### Second +Ensure the app has the required permissions. The symptom for this that you get a permanent error notification + 'error_audio_error` when starting a listen session. Here's a Stack Overflow post that addresses that + https://stackoverflow.com/questions/46376193/android-speechrecognizer-audio-recording-error + Here's the important excerpt: + >You should go to system setting, Apps, Google app, then enable its permission of microphone. + +### iOS recognition guidelines +Apple has quite a good guide on the user experience for using speech, the original is here +https://developer.apple.com/documentation/speech/sfspeechrecognizer This is the section that I think is particularly relevant: + +>#### Create a Great User Experience for Speech Recognition +>Here are some tips to consider when adding speech recognition support to your app. + +>**Be prepared to handle failures caused by speech recognition limits.** Because speech recognition is a network-based service, limits are enforced so that the service can remain freely available to all apps. Individual devices may be limited in the number of recognitions that can be performed per day, and each app may be throttled globally based on the number of requests it makes per day. If a recognition request fails quickly (within a second or two of starting), check to see if the recognition service became unavailable. If it is, you may want to ask users to try again later. + +>**Plan for a one-minute limit on audio duration.** Speech recognition places a relatively high burden on battery life and network usage. 
To minimize this burden, the framework stops speech recognition tasks that last longer than one minute. This limit is similar to the one for keyboard-related dictation. +Remind the user when your app is recording. For example, display a visual indicator and play sounds at the beginning and end of speech recognition to help users understand that they're being actively recorded. You can also display speech as it is being recognized so that users understand what your app is doing and see any mistakes made during the recognition process. + +>**Do not perform speech recognition on private or sensitive information.** Some speech is not appropriate for recognition. Don't send passwords, health or financial data, and other sensitive speech for recognition. diff --git a/speech_to_text/android/.classpath b/speech_to_text/android/.classpath new file mode 100644 index 00000000..eb19361b --- /dev/null +++ b/speech_to_text/android/.classpath @@ -0,0 +1,6 @@ + + + + + + diff --git a/speech_to_text/android/.gitignore b/speech_to_text/android/.gitignore new file mode 100644 index 00000000..c6cbe562 --- /dev/null +++ b/speech_to_text/android/.gitignore @@ -0,0 +1,8 @@ +*.iml +.gradle +/local.properties +/.idea/workspace.xml +/.idea/libraries +.DS_Store +/build +/captures diff --git a/speech_to_text/android/.project b/speech_to_text/android/.project new file mode 100644 index 00000000..3050653c --- /dev/null +++ b/speech_to_text/android/.project @@ -0,0 +1,23 @@ + + + speech_to_text + Project android_____ created by Buildship. 
+ + + + + org.eclipse.jdt.core.javabuilder + + + + + org.eclipse.buildship.core.gradleprojectbuilder + + + + + + org.eclipse.jdt.core.javanature + org.eclipse.buildship.core.gradleprojectnature + + diff --git a/speech_to_text/android/.settings/org.eclipse.buildship.core.prefs b/speech_to_text/android/.settings/org.eclipse.buildship.core.prefs new file mode 100644 index 00000000..7a23d112 --- /dev/null +++ b/speech_to_text/android/.settings/org.eclipse.buildship.core.prefs @@ -0,0 +1,13 @@ +arguments= +auto.sync=false +build.scans.enabled=false +connection.gradle.distribution=GRADLE_DISTRIBUTION(VERSION(5.6.1)) +connection.project.dir= +eclipse.preferences.version=1 +gradle.user.home= +java.home= +jvm.arguments= +offline.mode=false +override.workspace.settings=true +show.console.view=true +show.executions.view=true diff --git a/speech_to_text/android/build.gradle b/speech_to_text/android/build.gradle new file mode 100644 index 00000000..6b23b300 --- /dev/null +++ b/speech_to_text/android/build.gradle @@ -0,0 +1,44 @@ +group 'com.csdcorp.speech_to_text' +version '1.0-SNAPSHOT' + +buildscript { + ext.kotlin_version = '1.3.50' + repositories { + google() + jcenter() + } + + dependencies { + classpath 'com.android.tools.build:gradle:3.5.0' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" + } +} + +rootProject.allprojects { + repositories { + google() + jcenter() + } +} + +apply plugin: 'com.android.library' +apply plugin: 'kotlin-android' + +android { + compileSdkVersion 28 + + sourceSets { + main.java.srcDirs += 'src/main/kotlin' + } + defaultConfig { + minSdkVersion 21 + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + lintOptions { + disable 'InvalidPackage' + } +} + +dependencies { + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" +} diff --git a/speech_to_text/android/gradle/gradle.properties b/speech_to_text/android/gradle/gradle.properties new file mode 100644 index 00000000..94adc3a3 --- 
/dev/null +++ b/speech_to_text/android/gradle/gradle.properties @@ -0,0 +1,3 @@ +org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true diff --git a/speech_to_text/android/gradle/local.properties b/speech_to_text/android/gradle/local.properties new file mode 100644 index 00000000..b85628e7 --- /dev/null +++ b/speech_to_text/android/gradle/local.properties @@ -0,0 +1,2 @@ +sdk.dir=/Users/stephen.owens/Library/Android/sdk +flutter.sdk=/Users/stephen.owens/Documents/dev/flutter/sdk/flutter \ No newline at end of file diff --git a/speech_to_text/android/gradle/settings.gradle b/speech_to_text/android/gradle/settings.gradle new file mode 100644 index 00000000..cdfc1c4b --- /dev/null +++ b/speech_to_text/android/gradle/settings.gradle @@ -0,0 +1 @@ +rootProject.name = 'speech_to_text' diff --git a/speech_to_text/android/gradle/wrapper/gradle-wrapper.properties b/speech_to_text/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..674bdda0 --- /dev/null +++ b/speech_to_text/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip diff --git a/speech_to_text/android/src/main/AndroidManifest.xml b/speech_to_text/android/src/main/AndroidManifest.xml new file mode 100644 index 00000000..61a73f32 --- /dev/null +++ b/speech_to_text/android/src/main/AndroidManifest.xml @@ -0,0 +1,3 @@ + + diff --git a/speech_to_text/android/src/main/kotlin/com/csdcorp/speech_to_text/SpeechToTextPlugin.kt b/speech_to_text/android/src/main/kotlin/com/csdcorp/speech_to_text/SpeechToTextPlugin.kt new file mode 100644 index 00000000..7954add3 --- /dev/null +++ b/speech_to_text/android/src/main/kotlin/com/csdcorp/speech_to_text/SpeechToTextPlugin.kt @@ -0,0 +1,595 @@ +package com.csdcorp.speech_to_text + +import 
androidx.annotation.NonNull; +import io.flutter.embedding.engine.plugins.FlutterPlugin +import android.Manifest +import android.annotation.TargetApi +import android.app.Activity +import android.content.Intent +import android.content.pm.PackageManager +import android.os.Build +import android.os.Bundle +import android.speech.RecognitionListener +import android.speech.SpeechRecognizer.createSpeechRecognizer +import android.speech.RecognizerIntent +import android.speech.SpeechRecognizer +import androidx.core.app.ActivityCompat +import androidx.core.content.ContextCompat +import io.flutter.plugin.common.MethodCall +import io.flutter.plugin.common.MethodChannel +import io.flutter.plugin.common.MethodChannel.MethodCallHandler +import io.flutter.plugin.common.MethodChannel.Result +import io.flutter.plugin.common.PluginRegistry +import io.flutter.plugin.common.PluginRegistry.Registrar +import org.json.JSONObject +import android.content.Context +import android.content.BroadcastReceiver +import android.os.Handler +import android.os.Looper +import android.util.Log +import io.flutter.embedding.engine.plugins.activity.ActivityAware +import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding +import io.flutter.plugin.common.BinaryMessenger +import org.json.JSONArray +import java.util.* + + +enum class SpeechToTextErrors { + multipleRequests, + unimplemented, + noLanguageIntent, + recognizerNotAvailable, + missingOrInvalidArg, + unknown +} + +enum class SpeechToTextCallbackMethods { + textRecognition, + notifyStatus, + notifyError, + soundLevelChange, +} + +enum class SpeechToTextStatus { + listening, + notListening, + unavailable, + available, +} + +enum class ListenMode { + deviceDefault, + dictation, + search, + confirmation, +} + +const val pluginChannelName = "plugin.csdcorp.com/speech_to_text" + +@TargetApi(8) +/** SpeechToTextPlugin */ +public class SpeechToTextPlugin : + MethodCallHandler, RecognitionListener, + 
PluginRegistry.RequestPermissionsResultListener, FlutterPlugin, + ActivityAware { + private var pluginContext: Context? = null + private var channel: MethodChannel? = null + private val minSdkForSpeechSupport = 21 + private val speechToTextPermissionCode = 28521 + private val missingConfidence: Double = -1.0 + private val logTag = "SpeechToTextPlugin" + private var currentActivity: Activity? = null + private var activeResult: Result? = null + private var initializedSuccessfully: Boolean = false + private var permissionToRecordAudio: Boolean = false + private var listening = false + private var debugLogging: Boolean = false + private var speechRecognizer: SpeechRecognizer? = null + private var recognizerIntent: Intent? = null + private var previousRecognizerLang: String? = null + private var previousPartialResults: Boolean = true + private var previousListenMode: ListenMode = ListenMode.deviceDefault + private var lastFinalTime: Long = 0 + private val handler: Handler = Handler(Looper.getMainLooper()) + private val defaultLanguageTag: String = Locale.getDefault().toLanguageTag() + + override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) { + + onAttachedToEngine(flutterPluginBinding.getApplicationContext(), flutterPluginBinding.getBinaryMessenger()); + } + + // This static function is optional and equivalent to onAttachedToEngine. It supports the old + // pre-Flutter-1.12 Android projects. You are encouraged to continue supporting + // plugin registration via this function while apps migrate to use the new Android APIs + // post-flutter-1.12 via https://flutter.dev/go/android-project-migration. + // + // It is encouraged to share logic between onAttachedToEngine and registerWith to keep + // them functionally equivalent. Only one of onAttachedToEngine or registerWith will be called + // depending on the user's project. onAttachedToEngine or registerWith must both be defined + // in the same class. 
+ companion object { + @JvmStatic + fun registerWith(registrar: Registrar) { + val speechPlugin = SpeechToTextPlugin() + speechPlugin.currentActivity = registrar.activity() + registrar.addRequestPermissionsResultListener(speechPlugin) + speechPlugin.onAttachedToEngine(registrar.context(), registrar.messenger()) + } + } + + private fun onAttachedToEngine(applicationContext: Context, messenger: BinaryMessenger) { + this.pluginContext = applicationContext; + channel = MethodChannel(messenger, pluginChannelName) + channel?.setMethodCallHandler(this) + } + + override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) { + this.pluginContext = null; + channel?.setMethodCallHandler(null) + channel = null + } + + override fun onDetachedFromActivity() { + currentActivity = null + } + + override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) { + currentActivity = binding.activity + binding.addRequestPermissionsResultListener(this) + } + + override fun onAttachedToActivity(binding: ActivityPluginBinding) { + currentActivity = binding.activity + binding.addRequestPermissionsResultListener(this) + } + + override fun onDetachedFromActivityForConfigChanges() { + currentActivity = null + } + + override fun onMethodCall(@NonNull call: MethodCall, @NonNull rawrResult: Result) { + val result = ChannelResultWrapper(rawrResult) + try { + when (call.method) { + "has_permission" -> hasPermission(result) + "initialize" -> { + var dlog = call.argument("debugLogging") + if (null != dlog) { + debugLogging = dlog + } + initialize(result) + } + "listen" -> { + var localeId = call.argument("localeId") + if (null == localeId) { + localeId = defaultLanguageTag + } + var partialResults = call.argument("partialResults") + if (null == partialResults) { + partialResults = true + } + val listenModeIndex = call.argument("listenMode") + if ( null == listenModeIndex ) { + result.error(SpeechToTextErrors.missingOrInvalidArg.name, + "listenMode is 
required", null) + return + } + startListening(result, localeId, partialResults, listenModeIndex ) + } + "stop" -> stopListening(result) + "cancel" -> cancelListening(result) + "locales" -> locales(result) + else -> result.notImplemented() + } + } catch (exc: Exception) { + Log.e(logTag, "Unexpected exception", exc) + result.error(SpeechToTextErrors.unknown.name, + "Unexpected exception", exc.localizedMessage) + } + } + + private fun hasPermission(result: Result) { + if (sdkVersionTooLow(result)) { + return + } + debugLog("Start has_permission") + val localContext = pluginContext + if (localContext != null) { + val hasPerm = ContextCompat.checkSelfPermission(localContext, + Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED + result.success(hasPerm) + } + } + + private fun initialize(result: Result) { + if (sdkVersionTooLow(result)) { + return + } + debugLog("Start initialize") + if (null != activeResult) { + result.error(SpeechToTextErrors.multipleRequests.name, + "Only one initialize at a time", null) + return + } + activeResult = result + val localContext = pluginContext + initializeIfPermitted(pluginContext) + } + + private fun sdkVersionTooLow(result: Result): Boolean { + if (Build.VERSION.SDK_INT < minSdkForSpeechSupport) { + result.success(false) + return true; + } + return false; + } + + private fun isNotInitialized(result: Result): Boolean { + if (!initializedSuccessfully || null == pluginContext) { + result.success(false) + } + return !initializedSuccessfully + } + + private fun isListening(): Boolean { + return listening + } + + private fun isNotListening(): Boolean { + return !listening + } + + private fun startListening(result: Result, languageTag: String, partialResults: Boolean, + listenModeIndex: Int) { + if (sdkVersionTooLow(result) || isNotInitialized(result) || isListening()) { + return + } + debugLog("Start listening") + var listenMode = ListenMode.deviceDefault + if ( listenModeIndex == ListenMode.dictation.ordinal) { + 
listenMode = ListenMode.dictation + } + setupRecognizerIntent(languageTag, partialResults, listenMode) + handler.post { + run { + speechRecognizer?.startListening(recognizerIntent) + } + } + notifyListening(isRecording = true) + result.success(true) + debugLog("Start listening done") + } + + private fun stopListening(result: Result) { + if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) { + return + } + debugLog("Stop listening") + handler.post { + run { + speechRecognizer?.stopListening() + } + } + notifyListening(isRecording = false) + result.success(true) + debugLog("Stop listening done") + } + + private fun cancelListening(result: Result) { + if (sdkVersionTooLow(result) || isNotInitialized(result) || isNotListening()) { + return + } + debugLog("Cancel listening") + handler.post { + run { + speechRecognizer?.cancel() + } + } + notifyListening(isRecording = false) + result.success(true) + debugLog("Cancel listening done") + } + + private fun locales(result: Result) { + if (sdkVersionTooLow(result) || isNotInitialized(result)) { + return + } + var detailsIntent = RecognizerIntent.getVoiceDetailsIntent(pluginContext) + if (null == detailsIntent) { + detailsIntent = Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS) + } + if (null == detailsIntent) { + result.error(SpeechToTextErrors.noLanguageIntent.name, + "Could not get voice details", null) + return + } + pluginContext?.sendOrderedBroadcast( + detailsIntent, null, LanguageDetailsChecker(result), + null, Activity.RESULT_OK, null, null) + } + + private fun notifyListening(isRecording: Boolean) { + debugLog("Notify listening") + listening = isRecording + val status = when (isRecording) { + true -> SpeechToTextStatus.listening.name + false -> SpeechToTextStatus.notListening.name + } + channel?.invokeMethod(SpeechToTextCallbackMethods.notifyStatus.name, status) + debugLog("Notify listening done") + } + + private fun updateResults(speechBundle: Bundle?, isFinal: Boolean) { + if 
(isDuplicateFinal( isFinal )) { + debugLog("Discarding duplicate final") + return + } + val userSaid = speechBundle?.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION) + if (null != userSaid && userSaid.isNotEmpty()) { + val speechResult = JSONObject() + speechResult.put("finalResult", isFinal) + val confidence = speechBundle?.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES) + val alternates = JSONArray() + for (resultIndex in 0..userSaid.size - 1) { + val speechWords = JSONObject() + speechWords.put("recognizedWords", userSaid[resultIndex]) + if (null != confidence && confidence.size >= userSaid.size) { + speechWords.put("confidence", confidence[resultIndex]) + } else { + speechWords.put("confidence", missingConfidence) + } + alternates.put(speechWords) + } + speechResult.put("alternates", alternates) + val jsonResult = speechResult.toString() + debugLog("Calling results callback") + channel?.invokeMethod(SpeechToTextCallbackMethods.textRecognition.name, + jsonResult) + } + } + + private fun isDuplicateFinal( isFinal: Boolean ) : Boolean { + if ( !isFinal ) { + return false + } + val delta = System.currentTimeMillis() - lastFinalTime + lastFinalTime = System.currentTimeMillis() + return delta >= 0 && delta < 100 + } + + private fun initializeIfPermitted(context: Context?) 
{ + val localContext = context + if (null == localContext) { + completeInitialize() + return + } + permissionToRecordAudio = ContextCompat.checkSelfPermission(localContext, + Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED + debugLog("Checked permission") + if (!permissionToRecordAudio) { + val localActivity = currentActivity + if (null != localActivity) { + debugLog("Requesting permission") + ActivityCompat.requestPermissions(localActivity, + arrayOf(Manifest.permission.RECORD_AUDIO), speechToTextPermissionCode) + } else { + debugLog("no permission, no activity, completing") + completeInitialize() + } + } else { + debugLog("has permission, completing") + completeInitialize() + } + debugLog("leaving initializeIfPermitted") + } + + private fun completeInitialize() { + + debugLog("completeInitialize") + if (permissionToRecordAudio) { + debugLog("Testing recognition availability") + if (!SpeechRecognizer.isRecognitionAvailable(pluginContext)) { + Log.e(logTag, "Speech recognition not available on this device") + activeResult?.error(SpeechToTextErrors.recognizerNotAvailable.name, + "Speech recognition not available on this device", "") + activeResult = null + return + } + + debugLog("Creating recognizer") + speechRecognizer = createSpeechRecognizer(pluginContext).apply { + debugLog("Setting listener") + setRecognitionListener(this@SpeechToTextPlugin) + } + if (null == speechRecognizer) { + Log.e(logTag, "Speech recognizer null") + activeResult?.error( + SpeechToTextErrors.recognizerNotAvailable.name, + "Speech recognizer null", "") + activeResult = null + } + + debugLog("before setup intent") + setupRecognizerIntent(defaultLanguageTag, true, ListenMode.deviceDefault) + debugLog("after setup intent") + } + + initializedSuccessfully = permissionToRecordAudio + debugLog("sending result") + activeResult?.success(permissionToRecordAudio) + debugLog("leaving complete") + activeResult = null + } + + private fun setupRecognizerIntent(languageTag: String, 
partialResults: Boolean, listenMode: ListenMode) { + debugLog("setupRecognizerIntent") + if (previousRecognizerLang == null || + previousRecognizerLang != languageTag || + partialResults != previousPartialResults || previousListenMode != listenMode ) { + previousRecognizerLang = languageTag; + previousPartialResults = partialResults + previousListenMode = listenMode + handler.post { + run { + recognizerIntent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply { + debugLog("In RecognizerIntent apply") + putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM) + debugLog("put model") + val localContext = pluginContext + if (null != localContext) { + putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, + localContext.applicationInfo.packageName) + } + debugLog("put package") + putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, partialResults) + debugLog("put partial") + if (languageTag != Locale.getDefault().toLanguageTag()) { + putExtra(RecognizerIntent.EXTRA_LANGUAGE, languageTag); + debugLog("put languageTag") + } + } + } + } + } + } + + override fun onRequestPermissionsResult(requestCode: Int, permissions: Array?, + grantResults: IntArray?): Boolean { + when (requestCode) { + speechToTextPermissionCode -> { + if (null != grantResults) { + permissionToRecordAudio = grantResults.isNotEmpty() && + grantResults.get(0) == PackageManager.PERMISSION_GRANTED + } + completeInitialize() + return true + } + } + return false + } + + + override fun onPartialResults(results: Bundle?) = updateResults(results, false) + override fun onResults(results: Bundle?) 
= updateResults(results, true) + override fun onEndOfSpeech() = notifyListening(isRecording = false) + + override fun onError(errorCode: Int) { + val errorMsg = when (errorCode) { + SpeechRecognizer.ERROR_AUDIO -> "error_audio_error" + SpeechRecognizer.ERROR_CLIENT -> "error_client" + SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS -> "error_permission" + SpeechRecognizer.ERROR_NETWORK -> "error_network" + SpeechRecognizer.ERROR_NETWORK_TIMEOUT -> "error_network_timeout" + SpeechRecognizer.ERROR_NO_MATCH -> "error_no_match" + SpeechRecognizer.ERROR_RECOGNIZER_BUSY -> "error_busy" + SpeechRecognizer.ERROR_SERVER -> "error_server" + SpeechRecognizer.ERROR_SPEECH_TIMEOUT -> "error_speech_timeout" + else -> "error_unknown" + } + sendError(errorMsg) + } + + private fun debugLog( msg: String ) { + if ( debugLogging ) { + Log.d( logTag, msg ) + } + } + + private fun sendError(errorMsg: String) { + val speechError = JSONObject() + speechError.put("errorMsg", errorMsg) + speechError.put("permanent", true) + handler.post { + run { + channel?.invokeMethod(SpeechToTextCallbackMethods.notifyError.name, speechError.toString()) + } + } + } + + override fun onRmsChanged(rmsdB: Float) { + handler.post { + run { + channel?.invokeMethod(SpeechToTextCallbackMethods.soundLevelChange.name, rmsdB) + } + } + } + + override fun onReadyForSpeech(p0: Bundle?) {} + override fun onBufferReceived(p0: ByteArray?) {} + override fun onEvent(p0: Int, p1: Bundle?) {} + override fun onBeginningOfSpeech() {} +} + +// See https://stackoverflow.com/questions/10538791/how-to-set-the-language-in-speech-recognition-on-android/10548680#10548680 +class LanguageDetailsChecker(flutterResult: Result) : BroadcastReceiver() { + private val result: Result = flutterResult + private var supportedLanguages: List? = null + + private var languagePreference: String? 
= null + + override fun onReceive(context: Context, intent: Intent) { + val results = getResultExtras(true) + if (results.containsKey(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE)) { + languagePreference = results.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE) + } + if (results.containsKey(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES)) { + supportedLanguages = results.getStringArrayList( + RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES) + createResponse(supportedLanguages) + } + } + + private fun createResponse(supportedLanguages: List?) { + val currentLocale = Locale.getDefault() + val localeNames = ArrayList() + localeNames.add(buildIdNameForLocale(currentLocale)) + if (null != supportedLanguages) { + for (lang in supportedLanguages) { + if (currentLocale.toLanguageTag() == lang) { + continue + } + val locale = Locale.forLanguageTag(lang) + localeNames.add(buildIdNameForLocale(locale)) + } + } + result.success(localeNames) + + } + + private fun buildIdNameForLocale(locale: Locale): String { + val name = locale.displayName.replace(':', ' ') + return "${locale.language}_${locale.country}:$name" + } +} + +private class ChannelResultWrapper(result: Result) : Result { + // Caller handler + val handler: Handler = Handler(Looper.getMainLooper()) + val result: Result = result + + // make sure to respond in the caller thread + override fun success(results: Any?) { + + handler.post { + run { + result.success(results); + } + } + } + + override fun error(errorCode: String?, errorMessage: String?, data: Any?) 
{ + handler.post { + run { + result.error(errorCode, errorMessage, data); + } + } + } + + override fun notImplemented() { + handler.post { + run { + result.notImplemented(); + } + } + } +} diff --git a/speech_to_text/example/.gitignore b/speech_to_text/example/.gitignore new file mode 100644 index 00000000..2ddde2a5 --- /dev/null +++ b/speech_to_text/example/.gitignore @@ -0,0 +1,73 @@ +# Miscellaneous +*.class +*.log +*.pyc +*.swp +.DS_Store +.atom/ +.buildlog/ +.history +.svn/ + +# IntelliJ related +*.iml +*.ipr +*.iws +.idea/ + +# The .vscode folder contains launch configuration and tasks you configure in +# VS Code which you may wish to be included in version control, so this line +# is commented out by default. +#.vscode/ + +# Flutter/Dart/Pub related +**/doc/api/ +.dart_tool/ +.flutter-plugins +.packages +.pub-cache/ +.pub/ +/build/ + +# Android related +**/android/**/gradle-wrapper.jar +**/android/.gradle +**/android/captures/ +**/android/gradlew +**/android/gradlew.bat +**/android/local.properties +**/android/**/GeneratedPluginRegistrant.java + +# iOS/XCode related +**/ios/**/*.mode1v3 +**/ios/**/*.mode2v3 +**/ios/**/*.moved-aside +**/ios/**/*.pbxuser +**/ios/**/*.perspectivev3 +**/ios/**/*sync/ +**/ios/**/.sconsign.dblite +**/ios/**/.tags* +**/ios/**/.vagrant/ +**/ios/**/DerivedData/ +**/ios/**/Icon? +**/ios/**/Pods/ +**/ios/**/.symlinks/ +**/ios/**/profile +**/ios/**/xcuserdata +**/ios/.generated/ +**/ios/Flutter/App.framework +**/ios/Flutter/Flutter.framework +**/ios/Flutter/Generated.xcconfig +**/ios/Flutter/app.flx +**/ios/Flutter/app.zip +**/ios/Flutter/flutter_assets/ +**/ios/Flutter/flutter_export_environment.sh +**/ios/ServiceDefinitions.json +**/ios/Runner/GeneratedPluginRegistrant.* + +# Exceptions to above rules. 
+!**/ios/**/default.mode1v3 +!**/ios/**/default.mode2v3 +!**/ios/**/default.pbxuser +!**/ios/**/default.perspectivev3 +!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages diff --git a/speech_to_text/example/.metadata b/speech_to_text/example/.metadata new file mode 100644 index 00000000..aeb01ee2 --- /dev/null +++ b/speech_to_text/example/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. + +version: + revision: 2d2a1ffec95cc70a3218872a2cd3f8de4933c42f + channel: stable + +project_type: app diff --git a/speech_to_text/example/README.md b/speech_to_text/example/README.md new file mode 100644 index 00000000..92252821 --- /dev/null +++ b/speech_to_text/example/README.md @@ -0,0 +1,155 @@ +# speech_to_text_example + +Demonstrates how to use the speech_to_text plugin. This example requires +that the plugin has been installed. It initializes speech recognition, +listens for words and prints them. 
+ + +## Source + +```dart +import 'package:flutter/material.dart'; +import 'dart:async'; + +import 'package:speech_to_text/speech_to_text.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; + +void main() => runApp(MyApp()); + +class MyApp extends StatefulWidget { + @override + _MyAppState createState() => _MyAppState(); +} + +class _MyAppState extends State { + bool _hasSpeech = false; + String lastWords = ""; + String lastError = ""; + String lastStatus = ""; + final SpeechToText speech = SpeechToText(); + + @override + void initState() { + super.initState(); + initSpeechState(); + } + + Future initSpeechState() async { + bool hasSpeech = await speech.initialize(onError: errorListener, onStatus: statusListener ); + + if (!mounted) return; + setState(() { + _hasSpeech = hasSpeech; + }); + } + + @override + Widget build(BuildContext context) { + return MaterialApp( + home: Scaffold( + appBar: AppBar( + title: const Text('Speech to Text Example'), + ), + body: _hasSpeech + ? Column(children: [ + Expanded( + child: Center( + child: Text('Speech recognition available'), + ), + ), + Expanded( + child: Row( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + FlatButton( + child: Text('Start'), + onPressed: startListening, + ), + FlatButton( + child: Text('Stop'), + onPressed: stopListening, + ), + FlatButton( + child: Text('Cancel'), + onPressed:cancelListening, + ), + ], + ), + ), + Expanded( + child: Column( + children: [ + Center( + child: Text('Recognized Words'), + ), + Center( + child: Text(lastWords), + ), + ], + ), + ), + Expanded( + child: Column( + children: [ + Center( + child: Text('Error'), + ), + Center( + child: Text(lastError), + ), + ], + ), + ), + Expanded( + child: Center( + child: speech.isListening ? 
Text("I'm listening...") : Text( 'Not listening' ), + ), + ), + ]) + : Center( child: Text('Speech recognition unavailable', style: TextStyle(fontSize: 20.0, fontWeight: FontWeight.bold))), + ), + ); + } + + void startListening() { + lastWords = ""; + lastError = ""; + speech.listen(onResult: resultListener ); + setState(() { + + }); + } + + void stopListening() { + speech.stop( ); + setState(() { + + }); + } + + void cancelListening() { + speech.cancel( ); + setState(() { + + }); + } + + void resultListener(SpeechRecognitionResult result) { + setState(() { + lastWords = "${result.recognizedWords} - ${result.finalResult}"; + }); + } + + void errorListener(SpeechRecognitionError error ) { + setState(() { + lastError = "${error.errorMsg} - ${error.permanent}"; + }); + } + void statusListener(String status ) { + setState(() { + lastStatus = "$status"; + }); + } +} +``` \ No newline at end of file diff --git a/speech_to_text/example/android/.project b/speech_to_text/example/android/.project new file mode 100644 index 00000000..d7d48141 --- /dev/null +++ b/speech_to_text/example/android/.project @@ -0,0 +1,17 @@ + + + android___ + Project android___ created by Buildship. 
+ + + + + org.eclipse.buildship.core.gradleprojectbuilder + + + + + + org.eclipse.buildship.core.gradleprojectnature + + diff --git a/speech_to_text/example/android/.settings/org.eclipse.buildship.core.prefs b/speech_to_text/example/android/.settings/org.eclipse.buildship.core.prefs new file mode 100644 index 00000000..e8895216 --- /dev/null +++ b/speech_to_text/example/android/.settings/org.eclipse.buildship.core.prefs @@ -0,0 +1,2 @@ +connection.project.dir= +eclipse.preferences.version=1 diff --git a/speech_to_text/example/android/app/build.gradle b/speech_to_text/example/android/app/build.gradle new file mode 100644 index 00000000..8b616f29 --- /dev/null +++ b/speech_to_text/example/android/app/build.gradle @@ -0,0 +1,67 @@ +def localProperties = new Properties() +def localPropertiesFile = rootProject.file('local.properties') +if (localPropertiesFile.exists()) { + localPropertiesFile.withReader('UTF-8') { reader -> + localProperties.load(reader) + } +} + +def flutterRoot = localProperties.getProperty('flutter.sdk') +if (flutterRoot == null) { + throw new GradleException("Flutter SDK not found. Define location with flutter.sdk in the local.properties file.") +} + +def flutterVersionCode = localProperties.getProperty('flutter.versionCode') +if (flutterVersionCode == null) { + flutterVersionCode = '1' +} + +def flutterVersionName = localProperties.getProperty('flutter.versionName') +if (flutterVersionName == null) { + flutterVersionName = '1.0' +} + +apply plugin: 'com.android.application' +apply plugin: 'kotlin-android' +apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle" + +android { + compileSdkVersion 28 + + sourceSets { + main.java.srcDirs += 'src/main/kotlin' + } + + lintOptions { + disable 'InvalidPackage' + } + + defaultConfig { + // TODO: Specify your own unique Application ID (https://developer.android.com/studio/build/application-id.html). 
+ applicationId "com.csdcorp.speech_to_text_example" + minSdkVersion 21 + targetSdkVersion 28 + versionCode flutterVersionCode.toInteger() + versionName flutterVersionName + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + } + + buildTypes { + release { + // TODO: Add your own signing config for the release build. + // Signing with the debug keys for now, so `flutter run --release` works. + signingConfig signingConfigs.debug + } + } +} + +flutter { + source '../..' +} + +dependencies { + implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version" + testImplementation 'junit:junit:4.12' + androidTestImplementation 'androidx.test:runner:1.1.1' + androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1' +} diff --git a/speech_to_text/example/android/app/src/debug/AndroidManifest.xml b/speech_to_text/example/android/app/src/debug/AndroidManifest.xml new file mode 100644 index 00000000..36edf838 --- /dev/null +++ b/speech_to_text/example/android/app/src/debug/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/speech_to_text/example/android/app/src/main/AndroidManifest.xml b/speech_to_text/example/android/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000..b0912061 --- /dev/null +++ b/speech_to_text/example/android/app/src/main/AndroidManifest.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/android/app/src/main/kotlin/com/csdcorp/speech_to_text_example/MainActivity.kt b/speech_to_text/example/android/app/src/main/kotlin/com/csdcorp/speech_to_text_example/MainActivity.kt new file mode 100644 index 00000000..f44e470e --- /dev/null +++ b/speech_to_text/example/android/app/src/main/kotlin/com/csdcorp/speech_to_text_example/MainActivity.kt @@ -0,0 +1,12 @@ +package com.csdcorp.speech_to_text_example + +import androidx.annotation.NonNull; +import io.flutter.embedding.android.FlutterActivity +import io.flutter.embedding.engine.FlutterEngine +import 
io.flutter.plugins.GeneratedPluginRegistrant + +class MainActivity: FlutterActivity() { + override fun configureFlutterEngine(@NonNull flutterEngine: FlutterEngine) { + GeneratedPluginRegistrant.registerWith(flutterEngine); + } +} diff --git a/speech_to_text/example/android/app/src/main/res/drawable/launch_background.xml b/speech_to_text/example/android/app/src/main/res/drawable/launch_background.xml new file mode 100644 index 00000000..304732f8 --- /dev/null +++ b/speech_to_text/example/android/app/src/main/res/drawable/launch_background.xml @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..db77bb4b7b0906d62b1847e87f15cdcacf6a4f29 GIT binary patch literal 544 zcmeAS@N?(olHy`uVBq!ia0vp^9w5xY3?!3`olAj~WQl7;NpOBzNqJ&XDuZK6ep0G} zXKrG8YEWuoN@d~6R2!h8bpbvhu0Wd6uZuB!w&u2PAxD2eNXD>P5D~Wn-+_Wa#27Xc zC?Zj|6r#X(-D3u$NCt}(Ms06KgJ4FxJVv{GM)!I~&n8Bnc94O7-Hd)cjDZswgC;Qs zO=b+9!WcT8F?0rF7!Uys2bs@gozCP?z~o%U|N3vA*22NaGQG zlg@K`O_XuxvZ&Ks^m&R!`&1=spLvfx7oGDKDwpwW`#iqdw@AL`7MR}m`rwr|mZgU`8P7SBkL78fFf!WnuYWm$5Z0 zNXhDbCv&49sM544K|?c)WrFfiZvCi9h0O)B3Pgg&ebxsLQ05GG~ AQ2+n{ literal 0 HcmV?d00001 diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..17987b79bb8a35cc66c3c1fd44f5a5526c1b78be GIT binary patch literal 442 zcmeAS@N?(olHy`uVBq!ia0vp^1|ZDA3?vioaBc-sk|nMYCBgY=CFO}lsSJ)O`AMk? 
zp1FzXsX?iUDV2pMQ*D5Xx&nMcT!A!W`0S9QKQy;}1Cl^CgaH=;G9cpY;r$Q>i*pfB zP2drbID<_#qf;rPZx^FqH)F_D#*k@@q03KywUtLX8Ua?`H+NMzkczFPK3lFz@i_kW%1NOn0|D2I9n9wzH8m|-tHjsw|9>@K=iMBhxvkv6m8Y-l zytQ?X=U+MF$@3 zt`~i=@j|6y)RWMK--}M|=T`o&^Ni>IoWKHEbBXz7?A@mgWoL>!*SXo`SZH-*HSdS+ yn*9;$7;m`l>wYBC5bq;=U}IMqLzqbYCidGC!)_gkIk_C@Uy!y&wkt5C($~2D>~)O*cj@FGjOCM)M>_ixfudOh)?xMu#Fs z#}Y=@YDTwOM)x{K_j*Q;dPdJ?Mz0n|pLRx{4n|)f>SXlmV)XB04CrSJn#dS5nK2lM zrZ9#~WelCp7&e13Y$jvaEXHskn$2V!!DN-nWS__6T*l;H&Fopn?A6HZ-6WRLFP=R` zqG+CE#d4|IbyAI+rJJ`&x9*T`+a=p|0O(+s{UBcyZdkhj=yS1>AirP+0R;mf2uMgM zC}@~JfByORAh4SyRgi&!(cja>F(l*O+nd+@4m$|6K6KDn_&uvCpV23&>G9HJp{xgg zoq1^2_p9@|WEo z*X_Uko@K)qYYv~>43eQGMdbiGbo>E~Q& zrYBH{QP^@Sti!`2)uG{irBBq@y*$B zi#&(U-*=fp74j)RyIw49+0MRPMRU)+a2r*PJ$L5roHt2$UjExCTZSbq%V!HeS7J$N zdG@vOZB4v_lF7Plrx+hxo7(fCV&}fHq)$ literal 0 HcmV?d00001 diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png new file mode 100644 index 0000000000000000000000000000000000000000..d5f1c8d34e7a88e3f88bea192c3a370d44689c3c GIT binary patch literal 1031 zcmeAS@N?(olHy`uVBq!ia0vp^6F``Q8Ax83A=Cw=BuiW)N`mv#O3D+9QW+dm@{>{( zJaZG%Q-e|yQz{EjrrIztFa`(sgt!6~Yi|1%a`XoT0ojZ}lNrNjb9xjc(B0U1_% zz5^97Xt*%oq$rQy4?0GKNfJ44uvxI)gC`h-NZ|&0-7(qS@?b!5r36oQ}zyZrNO3 zMO=Or+<~>+A&uN&E!^Sl+>xE!QC-|oJv`ApDhqC^EWD|@=#J`=d#Xzxs4ah}w&Jnc z$|q_opQ^2TrnVZ0o~wh<3t%W&flvYGe#$xqda2bR_R zvPYgMcHgjZ5nSA^lJr%;<&0do;O^tDDh~=pIxA#coaCY>&N%M2^tq^U%3DB@ynvKo}b?yu-bFc-u0JHzced$sg7S3zqI(2 z#Km{dPr7I=pQ5>FuK#)QwK?Y`E`B?nP+}U)I#c1+FM*1kNvWG|a(TpksZQ3B@sD~b zpQ2)*V*TdwjFOtHvV|;OsiDqHi=6%)o4b!)x$)%9pGTsE z-JL={-Ffv+T87W(Xpooq<`r*VzWQcgBN$$`u}f>-ZQI1BB8ykN*=e4rIsJx9>z}*o zo~|9I;xof literal 0 HcmV?d00001 diff --git a/speech_to_text/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/speech_to_text/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png new file mode 100644 index 
0000000000000000000000000000000000000000..4d6372eebdb28e45604e46eeda8dd24651419bc0 GIT binary patch literal 1443 zcmb`G{WsKk6vsdJTdFg%tJav9_E4vzrOaqkWF|A724Nly!y+?N9`YV6wZ}5(X(D_N(?!*n3`|_r0Hc?=PQw&*vnU?QTFY zB_MsH|!j$PP;I}?dppoE_gA(4uc!jV&0!l7_;&p2^pxNo>PEcNJv za5_RT$o2Mf!<+r?&EbHH6nMoTsDOa;mN(wv8RNsHpG)`^ymG-S5By8=l9iVXzN_eG%Xg2@Xeq76tTZ*dGh~Lo9vl;Zfs+W#BydUw zCkZ$o1LqWQO$FC9aKlLl*7x9^0q%0}$OMlp@Kk_jHXOjofdePND+j!A{q!8~Jn+s3 z?~~w@4?egS02}8NuulUA=L~QQfm;MzCGd)XhiftT;+zFO&JVyp2mBww?;QByS_1w! zrQlx%{^cMj0|Bo1FjwY@Q8?Hx0cIPF*@-ZRFpPc#bBw{5@tD(5%sClzIfl8WU~V#u zm5Q;_F!wa$BSpqhN>W@2De?TKWR*!ujY;Yylk_X5#~V!L*Gw~;$%4Q8~Mad z@`-kG?yb$a9cHIApZDVZ^U6Xkp<*4rU82O7%}0jjHlK{id@?-wpN*fCHXyXh(bLt* zPc}H-x0e4E&nQ>y%B-(EL=9}RyC%MyX=upHuFhAk&MLbsF0LP-q`XnH78@fT+pKPW zu72MW`|?8ht^tz$iC}ZwLp4tB;Q49K!QCF3@!iB1qOI=?w z7In!}F~ij(18UYUjnbmC!qKhPo%24?8U1x{7o(+?^Zu0Hx81|FuS?bJ0jgBhEMzf< zCgUq7r2OCB(`XkKcN-TL>u5y#dD6D!)5W?`O5)V^>jb)P)GBdy%t$uUMpf$SNV31$ zb||OojAbvMP?T@$h_ZiFLFVHDmbyMhJF|-_)HX3%m=CDI+ID$0^C>kzxprBW)hw(v zr!Gmda);ICoQyhV_oP5+C%?jcG8v+D@9f?Dk*!BxY}dazmrT@64UrP3hlslANK)bq z$67n83eh}OeW&SV@HG95P|bjfqJ7gw$e+`Hxo!4cx`jdK1bJ>YDSpGKLPZ^1cv$ek zIB?0S<#tX?SJCLWdMd{-ME?$hc7A$zBOdIJ)4!KcAwb=VMov)nK;9z>x~rfT1>dS+ zZ6#`2v@`jgbqq)P22H)Tx2CpmM^o1$B+xT6`(v%5xJ(?j#>Q$+rx_R|7TzDZe{J6q zG1*EcU%tE?!kO%^M;3aM6JN*LAKUVb^xz8-Pxo#jR5(-KBeLJvA@-gxNHx0M-ZJLl z;#JwQoh~9V?`UVo#}{6ka@II>++D@%KqGpMdlQ}?9E*wFcf5(#XQnP$Dk5~%iX^>f z%$y;?M0BLp{O3a(-4A?ewryHrrD%cx#Q^%KY1H zNre$ve+vceSLZcNY4U(RBX&)oZn*Py()h)XkE?PL$!bNb{N5FVI2Y%LKEm%yvpyTP z(1P?z~7YxD~Rf<(a@_y` literal 0 HcmV?d00001 diff --git a/speech_to_text/example/android/app/src/main/res/values/styles.xml b/speech_to_text/example/android/app/src/main/res/values/styles.xml new file mode 100644 index 00000000..00fa4417 --- /dev/null +++ b/speech_to_text/example/android/app/src/main/res/values/styles.xml @@ -0,0 +1,8 @@ + + + + diff --git a/speech_to_text/example/android/app/src/profile/AndroidManifest.xml 
b/speech_to_text/example/android/app/src/profile/AndroidManifest.xml new file mode 100644 index 00000000..36edf838 --- /dev/null +++ b/speech_to_text/example/android/app/src/profile/AndroidManifest.xml @@ -0,0 +1,7 @@ + + + + diff --git a/speech_to_text/example/android/build.gradle b/speech_to_text/example/android/build.gradle new file mode 100644 index 00000000..13546311 --- /dev/null +++ b/speech_to_text/example/android/build.gradle @@ -0,0 +1,31 @@ +buildscript { + ext.kotlin_version = '1.3.50' + repositories { + google() + jcenter() + } + + dependencies { + classpath 'com.android.tools.build:gradle:3.6.1' + classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" + } +} + +allprojects { + repositories { + google() + jcenter() + } +} + +rootProject.buildDir = '../build' +subprojects { + project.buildDir = "${rootProject.buildDir}/${project.name}" +} +subprojects { + project.evaluationDependsOn(':app') +} + +task clean(type: Delete) { + delete rootProject.buildDir +} diff --git a/speech_to_text/example/android/gradle.properties b/speech_to_text/example/android/gradle.properties new file mode 100644 index 00000000..a6738207 --- /dev/null +++ b/speech_to_text/example/android/gradle.properties @@ -0,0 +1,4 @@ +org.gradle.jvmargs=-Xmx1536M +android.useAndroidX=true +android.enableJetifier=true +android.enableR8=true diff --git a/speech_to_text/example/android/gradle/wrapper/gradle-wrapper.properties b/speech_to_text/example/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..052e7951 --- /dev/null +++ b/speech_to_text/example/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Mon Mar 16 08:57:32 EDT 2020 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-all.zip diff --git a/speech_to_text/example/android/settings.gradle 
b/speech_to_text/example/android/settings.gradle new file mode 100644 index 00000000..5a2f14fb --- /dev/null +++ b/speech_to_text/example/android/settings.gradle @@ -0,0 +1,15 @@ +include ':app' + +def flutterProjectRoot = rootProject.projectDir.parentFile.toPath() + +def plugins = new Properties() +def pluginsFile = new File(flutterProjectRoot.toFile(), '.flutter-plugins') +if (pluginsFile.exists()) { + pluginsFile.withReader('UTF-8') { reader -> plugins.load(reader) } +} + +plugins.each { name, path -> + def pluginDirectory = flutterProjectRoot.resolve(path).resolve('android').toFile() + include ":$name" + project(":$name").projectDir = pluginDirectory +} diff --git a/speech_to_text/example/assets/sounds/speech_to_text_cancel.m4r b/speech_to_text/example/assets/sounds/speech_to_text_cancel.m4r new file mode 100644 index 0000000000000000000000000000000000000000..ccb3afe392f563a0c8205265f8953c5cb801794a GIT binary patch literal 14006 zcmbumbyyrt(=WU%?hcCt*I-tSqSJzDU)YMe>^vnPN0CG!rA7>>VF)9Fzp1F;ilY@gZ4;QQm=U{H) z{;UT8xEufl3c>Y^iKqKSb~+V_x5cZUr>qv`#@FEx^0FWCkREA}9)fM}iX%}NH$=!? 
z9z`A~y;e@n-udq>>_Lwn2cNp!CZQ3JYZVVVoeQx?0KiKdb4E!3fI%)a9RdL2jws=D z0Z`!xX<^%s8Z{7DoAerH=U!H#&mw*TB?~wdjL-61U|&y^J#9(GCA5zF-7%VJ={MF$!@nl&gyU0qCaB&@g{;B`~gi&~Sm6aDYxgLaZ z5GoX<(WJB7(~o8J5@%Fz)m2NA)9}mr(}hKflZ#MNZHX{?i#{hS z$IHwAvc6-Tk(bnO))kZmfQP?#!AbK?yYJ3R&({zfFF1T5PUGE1>&_cSZqZFqd>&YG==a z9P_O-uI_|PUqH`1d`5|xkvwtm=H(~4O+WX-F}?8RMw@c4(!t~_`7AXVlvt|#)?+XI zcZ~}U8cTL|cF54r(5zo?=-_c^6YEsuxom0Y3wG^^gvEoUz{R^32Bf>YcvEy>JlS2e>zT$znZW-Lug|;gpns^u%N5k-kWKTtFkc(Ld+Kwjoa9p# z%V4M`#~NN9RV?FZ*CUYI3@>;_P7E*YkrY>L7>6!aosPhtG>H=x1))NlRym3}1!XWn z54qd(njon`H;tsvQjVUKnkjLp3nDj8)3;TD&M94NB2KrPw$#JoiS3e6CJ=Kpf#Bf&Ze^ zEW6QmakvZu)blAx=@n0~*W*n(Fm}P?7YHpCrIXcW8tUexBOTyqO)`zH?aO81fq$3_G5b7#;a z;X>i7r=R!G{rktxlLyxOr;Ba83JKxF&8jSM=_lg0j-4X=El*S&_|_VYZyph`n>i%$ zc7lu3=_79&`}M3!z30EDDgPGIW*CnLZEUQ>eo(>sp|@DmW7U(roV~Pk)}5D+VP3M7 zmlu|om$$S86Q1e6!e8+}%D?hrdiBgHDplf)Z!D+4X26Hn%tXQUA0%M#Yw0=(B%1KZWfiVElXp-(bD7QHb1D50UO0kDb7(D*TliCtg zLz1K__3wj#tZHE(4J0^7HFaT^ge)wjyhTp z(i71!HRgR`?l-m9s2fv40~evax_EL%h6CMH%u2rm=fVb$;`9u(`gXB2 zJ>qtDkD?_aPFy3d=ujk`IEF9jOoe%b+Lp&JlJapqttht^GImD%s62IN#T-8S6@q}{ z69$khqcZ>M0ihoh>Y`VZ3%4lbuE_>@aP zsD_Bhi^9ah=k8C?EGDM)Z1NG&oQa9qGdMWAsxLS@)SCHgJ|)~*pljgP`tbF2xKl#- z&+F?jXhQh+lM~b@VKT3^RpKXMjQqbA6ZiKiP)a1PwIBI6H{2ah+t9)8yz6U5n88VB zi^$ojmH*=q%s{2H8FoCFb;!eez;{sq03HGd;^AOt!VK#-NhX1R?EH59*s-xUbm&1- zSMk$j#Pa5*S~4}#mP@knhi}lYVrM;dVajtkVN~utJCmfms5&Rya~XO@Q>?5_9XSV_ zq-gYpW8@Bt`mLl!- ztLyl%_T~;(M#6n(Ij@s03pqK|_o-3PQbiPi#_pzt%Pqt!GX49~D`1`^X6r7h%_(3= zkoJuRQ*>5(@cj5TU_@Vr9QZwk{_=rcN5?j1xNmP1qv0w|r}fFH?siaYHiEGg-`BuJ z@QxOXFALsTWa8}1eF~>uSM6rb7Jk9ij(61^zcwgcg4w6p?NvNe3Qb%f! 
z%ZqhvgM=cXm}Ta;9a&nz^+^aoUN<@mGQKkAJ>5`8TIYehYEG%tRW;Vak!-)Y9U^tE zQtkpLM2;WjrkZHzdM6-r)H8-A4bJ@I=}6t!Fjl}~=RSdnabzj$2+E`M8`gDxYOWtb zRVaHdCqSJ)6AI00@z{hm#;ozk;ZN|m#`#IEfHKi~{sjO~Hz_N`$cpTu{X@pX?blEr ze{#04;Sc4*f&=69l?*N2n_XT3Gy*2REp_pR@eL2Hr6_&#`}8>)lC&Z@zXB)rg-B8` zb*-BPyW3vb$pSCn1EoG?0uVIq!gi!wL>p&xamx{rh%PtZKS$mG8G3Y&nHu)DqX(Dw zenQ$`Cikv(4?ob93Tn6MtU#%t?XI2It;OkSCU0-P8H`9t9g%S` zt`E`R7NGPgav(J3q)%Qp-jnT|V_`zOQ`#^(R~GybB2Y!u>cp4JIgXsBeC@r1#wX&M4fP>sF5X>xU%A94}_qiw1?BoFkJS>6yU)`a6 zryGE7!fXhfWxA;u(IXkF(YB;u_k%S=^3v0;wBdE+)&vr_)b7LW)gc59frq1(1tYqr z#6REXHgN5rao+F2;$|yqvx7O!XTZvkt5bEO*!04b+Ybvl_`bc!I6m2aMDP_gk$$|w zCuen>G%{7h^4|dOcFwUSlZ9qKolFa&xtHYq%b zDMXwQ(2`in_cd`=07%c0)RD{6<2Z9p{D%tZmLxyPD=kq#Hvk~7R;yNq;s5feR)_E< zZLmC}XOL9<20@*!yQ4k7UinK4@CBBfqt3w&Bt#v3l;_7NKbztKNzp+M15!HF_mdy93HRRkB2~gCfwgKOC84UjA-UNALW1LZ8q)p3^GYwBGm@v+ zb}fmR{`3!mKdCBwuAz$Qimq2v3h+3!Ad02p`w)|#f4bm(;q#}DMnnM}@j0uN)qNeKIwYNacSA#kJ@ityfiB{=9}d{qOQ^brv*3_KroZ_!Q(zNI*_ zhsIvH?g{)r=tF-u^)TBkCh6s3$NyR3tnJpkt%LQFZSh>fhMNInu05 z?sN3hTp**g8qxjmpLXYdU$^By5O+xH#3&YwY4D6SVASf}BiomRD=7fLra?*ZhGz!m zGRpwNu9GJbO%*C&a@*F28)b>As8^b-6)O-p0v@()7Q67C&Fas3dy{KVlTg9iD$X~4 z2IIFSI$CL+RfK~P32Q*KDM@e(3t5!r-0!`0r3mv=JWqyjId=3fR9wxGZ|Ri{WoxJ8 zlq?`jP|pR$+3C0+r-tja@+aFr7MvGjC~uD*jv5zk!?R>^uo+PCTI;lh^}VMdJzano z4OwZcS#`}**S~R*8}sMgu!xKWK8RM6Vw=PdscqGCILEpvSbo^q;sao+@dHp+G>5`4 zvY-W@u1;J#9xKkNhW@lIMtWLRIeG+qUHkrBk6j54xi=#TIVul0UUE(aiNs*VQqsIr zSnaEbn6~ciJ@_d5o$R=1PjEjpOWa6ut+bloLZuB01(pR}Og;}@m+7Js5})(Q4;FP9ofQbOmFNrT$&4(3n4JAW3*Nw75y7xD z%=tM{*0Ru~!uhvVVo^~N%0BGy%I z;ysI`r0G4>v2KrH0rAcFt*Wo_m|lJ3YMXy_MA^*N`c+Lb@m0)o7*d@s?%U3~lWf5N z?w4d?_Om83nx<>TWYK3Bb)DKHtTS`y;@l|=L<{=AfUTfz{o8V2H#-HJK6pAb8jU|% z4~*BkTb7W$u07dTI8s#0!Ph)OK5sq9H2+?2-nJi*0)I@*Kz|Uo#8%Vgy+>0CQY(+0 zr%f`@2fxW<;t6}O;xl;tg)%7mjY4pca&2%6nG00!D8T=Qz6ATt9lM!|JLs-oCnEVh zVMZg;v_($3Lz*i@rV#R7svmd4XIE1*}E;Ws_+(zZtD zHKwBwn?OImNJKb7#wMo&plkg|WS}!dEId>GuTKN_E}%}GhYMPVv474wzg8Jd4CWK3 zeH9qqrYK=d_M-X9fHP>~F$@PUM2tp9M&71?7&no2rQ^;a8afe?mBDK`tT4i*+MB7b 
zRvAnmL35Z}Rz?NE*xKwOQJ%6kX$*x&;`kKx4gPu&dxsPt-UnO0=}6C`ruC!MQNAp;9`5Z^o}bggs6?7O(GHM*cgCC2(sE22%ulB z1^l2@YCg9m#RQlF6<*|a#N7LH(&lUXk-I+piV`=Kd$MsYC4z#`aNRCVA zCX@wAlFLiH;X+I3)Cc@{N#vM*73ooC%amyWVkQ@gPT3G<#)W{UW&GWc4@jOG?0%RIFbv_0zP@Xv4s(tP7Z?`k}p}1z) zx==;DEqdHsygy&y!Gne9mB<#;P=SzGHzX$mnU&kr3*q@9PqUH$k9OP|GRrM7ws3Lh z7v4nB$`&s#K;8weRZ^u<>%Ts4CLCea$JpLS7jxYrH;+Z4837o+N8n_l(bIPDbZQ%|4l#R(c{Zb)%fkI|SM|N3Yx++Sxb?@!?@kyTt zQ>jGHU1}4ttdDo^V6iw(n0SAW&B`h8?%NTDMD%P@)#QrFEv_5td+7SiWMk#px8^3J z4ISBvq?kb=7Igx6a4xHr8wdJEX!qL>#5@3LW9!F2aNnsCFx7J9b)Vsu7U`?mY z#QCuLL?5lgPO3qVl4KUy$*dUaoT@=o{>5z={?*rph3oWKuzlQkbYXDg#5J7Vw6Q+P zx0PZC!HD-3ihK}^q))1(D8%hVEEGW}jg#D-um=j|mzjt;wEOACHns}DR1}o29w{ug zgjh&XefL;e=)7Ak<})cTIY-Dq#-dECjJL0})& zw<@Acn4*~yWM%X18>_F>n*%T|WuQ_k86vlOMl60yW6jFGb;DnODmFON4`iN%T4d=Ml!_6|# zD*aR-XrYl9e}SPGEVNWskZ*z@2pa+H8g3qjt9)42IrU__H@j$g8dh|%``I<0Skg^%c!}a~wfsJYrX6Y{d02kOL zF075?enCLIqpqM(K=%DwUXs8;+O$)2Fvdf$^9xp~9vjfZnT$XYk4$e!25y*bK*|r^ zK;x;k@)vt%IdN|v`|YNt9wMNJ=l)JH*Hg+Do-YP7+$&zx_Ll!#6GiU@zw!#;1^}D^ zr`&A2EPl~F8=N9rPtdJSXu~yhSm)|C<#*@ZV^w7nS608*g56#=USwq7_O|Idzkz^o zK0`KxvnHfWd}Hlza?O1O`IB%}D;oYXy6y0QKGT|V;M5aHQW2+^?vX>%j?gu!P8*0whnGG^z$ zNw4Dm*p#%K2N|0j`|i_ZlF7f=@O~#xZv|ge?6{zXb3oyLtmP)nZ$0+FTuoj^rc03V z3i{Gg3ILjR$VS2n+O9?vsXNm&SAKy;5WkXC&*`Lb>ihXr&mpU1`t*K+v1R*UkM8u5 zL0Re2^FTX%LhSLgJ(gXbY5^~*R0v5m6h(V|e(C~l8u>v42N`hw9IZ5QPo04m3()0z z==JT!f?u9jdHWNK8;9@eO{IUQQb33|jxacs-?j0jVQzbUTadE!3KT_#;@q2NuGrR; zL920sqq!AR&&6YdMj?Y~E+&EeC%Uc=&Fu2jn;+%6QI}OX26}Ssg03bOv>Gy_B4G{P zB*}tQ{csWlh*(9!@s@d?7I{hHtsNyeSxM;!^+Ra9Nx#-+ZMoTgwxGG+K@qxGDKOZe z9^R(IIAaXP490s!!sj4`*T=!vMy|m#>gP37j=X2LX-^si?7PvPjt`VbQW1AD2>_1T2Fh$$ALZ zC1j0hBvq)8Eg_eevj5Eiu<^>jbMJQYIbZ_>wpMmGg>z3&bSH&QPA)Ri7OPld5a1KD zKH<#{gq%3oc$K9cc1F;rBuXHBTQxlIn8>c_)fUlHQ~YPG5+kPt79w2v&d!0M5@@ zb_N!=f1p(te-pxJg5I78c7FbVhwz)bvCkD3gIo_*Nu?_DT#e$Ia40F%0X0`b_1n$@ zXk7@dDN*ZGhK)gQLZY@peo>$SzW=^fE%65Sn}i1#q}<3ceW624={H(1OT|{C0$5DR z*`s$A&Sq>Cx|v;#h@)~R_DX^acu0<3Lz?mfKEe&L%zE>1iCHmwQ~uN)2M6dU#VL=! 
z+8b)aJ?0!StoAKSRaPE~3c2VjOq&8`5A>N{AS`j!5M>0XQt&-Y(!EvvlaU-Ng8Rv7 zhwDveNg`I`jiVze4j*J_dD>?pZT}6R$gp%nVOZ0uoZC{ksp?GIW}B$HJxstfoJ(pyAx=` z%$JQx?&!O{*9#R^WRt>I6~_^5c*^U^R?ev?R*`iGo1@<5)h!3C2(|8!h7mrvkhPpe zJkr5k4Vrk(42CZGX19>`A-h)ws2(%XhYI?)#xB@}tUW7fJsdlbO5?`9brW#5sN< zOWgO)Gzwk;$4aM1Tw^>(Qy<6P1cq`6s|ywiduYonQ{ca5BCsB-U%o)%{D!C!$JEKa zBX?`wt!|Z5d@;0@X%WVS(1q(Gk%jVxe>IQNgxRlK8o;B+h)#ga>}oplii~q4cuSlg z#k1a5#k6rUICWn!6Im6st?+Vx{_v*r&e~8_NQ9kDZ)Uy5kpE(A{w)IO!yPN8Xe1m? z{MuBVv&^@hEh>^l3jfvEDF8nlkej2D<$1l&=?Y3Aa`y;5&k9)WIH?<7F*p=Tt0K9% z>S%ZRL0i_Pg6vwdam7Ai^P9+vYPr6vxW7ec%mD!S8ewP1)zW{!Uv!#tEal;E?loB z?qM=@d)A9;jLq(9GIs#V_iroExA`U-3q_hWhhm+>JTM#Rn@^OR~oG-%sgr zaWGih;6s*OM5oT%MH5BA7J-4S2S2PV%2Sa9$dTZwCP_2z-S6TwPIy`v#?lJMe5dQx zq`&`JnnRY*{}n(rbOf5F$-;am)~#lzeBHcR`C9cM@53)#WE*>Zj+y&* zow@a`Mx=PWdHapB>1pWK6$#XSbE5%xY*Mvo+TK%o1&(XI zGKT?AR7<)}nDnXIbR?dn=K4NTBP}N~j-#*dXB>&Z*scQ4AYhQJSRWV}$K{m^hD(#9 z`R=uaxc!Pv4?SAj(D)?F)NE<@ab5r`(a1fGHCMacVg(!-;hqyD&z@uZh8iT5xWvu; zdnC@7ngkV}nV26aIf5^@7!YNLmqEuPnN-Y$SVb9D#?3>?m|Fy4p{w7Gk0bdL`*B6u;s^Cm}XC?|pNC+6Krncuv z@_x|yv>g!A-D{}2b*I!Ms4Sb?^q2r$xH_RcAAV=jV&Hq(-fi;D5I~qo)^wSn4j_NN z|NpKKeB^usUAy`_J-9gO|8jb7v^A)*@F#72f&8R$VKR{V*k6r6b;AmO95*KWrjBut z-b`b2yIKoHAw~;la*h#{r)H72e6Yu>&+#s%&s-m zgwsjCn^1_*mP%?}60^t(l*QvqpsZ(1uXCy!Vt;77?TGaj%NN+NuBXpbE4NK#T^l*_ z4=G6ZHd?x4B@}E$^W-fM9Pognnn{nIv+qz~%aHPA+5v5g?)e;-lAZq1_EUS2V8ZVA z?Q+Iy;CmJHth~UbxBC-dAQct2%me9$aPK%#km?o$SbrY8{;n2$ifAES!z^&1Xaq& z3^mTU4Q&gLp|Nw}?vgY{C%i_n4}SLDfV#BTMxSaLsZ$?`8=_Hl}{L@B_(fVYgq`3uc^DF75J?kAjhC;2p9!!p_!KG`L*f zo4tZg;_fuA=@1P~lQ;N+Y)S;nHes3fyGWu~25cF7e=?tThel-Zn_^~N1;Peq1JAt7 zOq7x#1J(-2NmL?$g2@FckF9MgcnIK>tE%>S1S|CKq?~|0E!;!*F0xZb_ZDvNuRC9l zH#)puAUowN{*^)X_&Y9v-rDBm4K8;7CJj97)=cjoEq))|NCQ$%Dk8})IUqGaPOP*K=a=ZhI-XYN?YcP~u_TcClLS4ucfe!Kg#_ZmnDat#~W z&j?mgvK>!S4X!uRqn3@24GMN6?GUsiZIQxO$TJ=;wZY#YMzPVsk`<>K$qBLO+_0^< zJ*H&-(&k`|Yqx4`M_u?Xal}^rz-{Sfu+!T~{(Sk`h2Nd>qlp&i*SL^F?C*IxRN{8f z3l92VVeHB|Q5gp5j*J|y+eK7Px<#x`2p`A9&{>SFJ!N+<&$(ey}#Jvzv2_&S{+YD 
z8tq3xdb}=vtpc6l;$BmvjqgjDJYvC3nOjE@ReTYQEyz^@>?(2sqBq$stoFpx;x$$> z5Kn_eX2hqau#jF6nLfQO(+%;)kIhO|foPb#g3rah8 zj0^|(Poglhdc50%QU*TWb~bIj+I#=#wpZb>)$TU$R_T_YbxtDJM^8p=Yck~C>is#_ zh6yEgoJhokowg6fw7yekWVbgfc{+f3FQRn-4org}x7*Cc(swd!F&&robn)RTSqvodm7eR=<#Xz<% zAGw9#HhJ|W{|lx%0!lTJUaFSMmF4mn_eS+PWnUHnB z@pHz&i0ylL5;=#K!WPHw{c8g{7jAVX58oc1-MtUUfxYO?C#fPJfU+wyTN=36nRsNX z+p#Vn$`s)CZlSJzSEFt{GDftM6`BtQBm3jAznl$U1R$L4l_hHDQ(>Xoki8(HS{wl( zLb#e(6h7LM+VK%chN{2y~cHo z3kTBc!SMi{i!8{Ru-Wz$aG0Y7W3s;;@#X4ebhDPpk!qmP1o&wzaGHv_uo1pYX3bQ@ z)$JMQLKv{=3169|CZkX1d7&pgg>)GjAO|4eC5agq?X9ycvlk%Ta4%Ier%&$sFq>e1 zmej3-?)pGR#-CX!{Uea+&QPV?pNEEc?xAsh&x%p4UL@UlRJ<+U-hO9 zZAq!#-Z`J)BB7#NPyrOoQzTQj(B)(y4$F03g13 z^!sl6$ooX)(#>3_(XWw6uW|+{eUEF=ILM~pz(gfQk4rWciNfIn2KeQe~6GX1p5)zyY=T7b>{M*0y z2P#awcKh9ab`eOOdmtypFkdLDb}v_~7kvrjf4@^oA@&W~l~xo<2~areN@B6dk;ppo z*1kegTseUe&-fc@7dD}yAd(W5p6pdfiXeZuYH~fQD5atc0V1hCY0z(Tx%9~6epbA^ ztQc1ub>Si zkY8JBn#Wl$Je>SK=;#=r{yd1% z1$E{-%ILI+(EXm++=bK$cv7A@r#bw{C-d)w>|X~)t~ou_89fFLJ_Y9aKKdP2tvq1= zc_hDaT3PdVdwbg{?)J8y#y6KW_&pY@oFKYCU5Lk%9Orq1LHnTX{HCZK!$tw%S}@^# zdC;0JKb#w6yDv z@REx8=9+SJJjo+3&9lWnMrzI`29s4F)FMB^&A!X7GZ)fmvMN-iw2nPuVDf4zOsh{; zufF0sbWN7HDZoH53o?Giscw;NUGsg6;}e%gAS)dJem@p~k{ZxXGAkmM)0!B!r2ZfS zb*3S4ed*Vg{8D|?Qsm>c-R++-v@I8)3p&HAot6gyYPkNbpu}kKDR4;e9m_trEa~%S z*2Q(=458)6i-NUrkQ(RA7;m3ITs2%5;GC=C;GfWo%oj;WID=k`7(I4Yw&p-pj25o% z)+m2n9Oe^p|n4; zzh*)w)M%{x+Cv(kDu1x&vd!#qW)|Kz%K=bpvrCd4z@>=mA07$NiWXOF>nrnCPgiLs ztP#f!RmpHZw+WRb7}8OSq!D<7N~S`ex7~Nk5h}aEj2mJaw5=gf;D%&+<={F@O5}|S z7WnB;NR#|pt$Rv=P#KamK=-z6yGY5h8i|yn^TDkPBN@)1%UQ2s9gXn#vjI{}$#Ofuw~Q8V@JD~-D(0xv zgJS&Xxl#xn>3$ZRZM-*)iGX|{3G*^0if13UehsqurhTOYoW*?27>0&EM^Az<08P8* z6(EyZN@gI6L|TPCkFr6?{(5P+i-s`pHjr`)s_=L*Q9qdrCE9v~b{r1qknd3lTpT7^ z&Zm?bN;W&8K7r6AU$M8VfiXUWAW1ie-T9s=NMzlWofatvMu-z`Si~PjZJec43t%YNA#hu0e$9ukdH z64r$%NHkqKV4d1$T#E)4`zEMO>%jf6Y|+lZ6qC=r`@^t6iggG289k5 z4RpD9P`Q2^>UG#bdf((k5lSY$vy*Bs4HTSr%6l@KS}1qWwY}V=L3%nIxvhGj>~v}H z#L75OWk76{K>!8;+#dD2Hu{o)D_sb(kyxB|7r8zqruw! 
zAA65S9_{|WH?hdcT;i+UFSe@_RntwL1eE_gud4RDnNWnk`Gna+oH4MTqh`}HSgAxqt zFlfS{_8+bcg5kNW0>iWYvuq5*bDcg6FfYNu0tOD|B{*Fep4$+F zK?DW{7>HqgB5-pzd%h&k^xS_|7*q8ycQ=7?SqF<}{+DR}_a%mf#eXmNf9e5nq&D_$ z&;MVwvB3z&$sH_A{sw}XlLNbnv$MSgyN#xYqs6l)2^>uvES~p#)@UfHio?WE*za4< ztv&YwK=q&BQNaHQeBA#L!2I0*t$H@$m*kQBuj2ow0Be@>v$=^6j5Gi5zHos#`Psm{ wY@EEBocw~^{DPePEMRUyFc@Y?|JmK?*?lkm_YR6Co|e3?*!j8HIl(?l!o)yCi6E*FbQ0O>hk`bB5>L&))kx z*ZKF=HFsBat<_7aYih0TnF0U+q!zATjtV@Y6aXl_a%&d{J3B`nF6b%H&dk{LRSy8X z?*`<{1=Z6=E04kr>}F`3@0!ITWdrK7co-6PUSR!XaC~MZAo z$dB$G@(1Gf0*08?I`@illV)Pb57Mb7Q6k4^dcLpDoRSZqALiS;a;3L6f_^83jw=@MX&R#b`wFRFJg_Z*?Z7{2`qS4x>zh*#MO(5hv{3^J4Vi?+X_L0z_TO z&NY}+7kbicJ=ugg@aECu5?q|c^~b!kLUBrJbj;#>lKl!ANq8kdRLR%H!(>omj#jMf zp4BgEHbW=h@y4!VE;WoqRi`7WZq*5063#Csp+Fhjkkmp&V+M?wHT|O2R%3Y>6$(%& zAQ=H<5=k)-!N&HxCU$T+niCB^FhCC0HTf@G4#9gac{&jwZ80cWdXPrdfB$}WWQLNH z&6OsHC54Elpcxb+ZM5?z+iNbAGbl!R2*P`PmL)40r?n6LBme?naL41M*O-CKSP|cF zD!d;OKfP$v^+De4nc`nEpi|xL%^VeKuNjkmlt^VJm4ela!jPHenW1iZ+63RVZ0Z>4 z;vo{a#!a22MR0uj&c1SMAKc=WTqHe8Yxf~CcVJXYlrU8h4w3|+zfGK9qoJXpU4K39 zc60*?v%Wus#XS~}DBL2Cr2X}(S{&v)AL<6PKi&E&+1oTeL7s%~yE`uN9&LN*dH$BC zBFUNI>(N9bzkF#Ol7R(RU`LZ9VAe_(zSMahZDZbR;@>I8VRo?%dY45v+cifLZ+mu_ z>@3wi`d>kY&lI=Ev>{Ko-F|=L!4USQbrIe-e|)+KnttEw=C~h*IA)9)PWGshX*p0{zocw_=ue2g2j~YJ8pY2%Uy`AT@HKW z&n^{W!Vj7e3yx|u5TGi?Cj2HJdRGw=ohl<8>q?-%3p&7N7NsVPM3NT$Zv1oJzWrIr zjbC~=P8KJk$BRLk{C!s-UJSx5cT=I62y?3z5?8&e-Q?$0LHQ~b4pjf>LFc@&#;O*n z4f4#GngQIO%Huxc#4oj%1&Y8WHtnAQ{*bKEgDaEYA`DYgeZhOzUP4QzVN^kV0N^tu z?;30*TH5GHJQ<``kf>r+FAoJC0$3q^!|Krou}7YGo4qTS1BIN-X{beEPfD%ibq{wh-Qeks%Lg#;{mb)t z9w1A}Tx-$L43I^iXYF1L;k(dcp&Ue*S+G9V~vN#Ao$}9tv|aZer0KSpH4%i zO=;sVR#s6QiDXPui~WCYXUeNf5|VuNF}%%j#zsR!JKLtDr0D7CnR*rem&DEbaJQ~L z=pj)pEomku&B^?QnqtqXwNb+sWHgcvXE8>LO_nXZmgo}6NY1Sp|JT zB}t9D(2N2Ulj*xs{YP6uvTH65p@8_}tON%MseDlw(j2UEfQ@?!8d0RC{$R2lDU%^u zO60e&8um3TF)4U-5m{5WX=0SKd*LDPm#*;FosGOEair;X_SqqcQ-gV&RZz!O4s5TV)&F9!Q3tK%L9$vDimy4(Wklmy;~Hk+T}B z&j{)ItX#>+e>ahGWIp!lW5))KSs7bjTST;|`mmvWkHUsPSVk1&LnEqb9RE(hc^KjQ 
z0B&w>!22%j_gUPW^{^cs5hQ&4O_c9DS_63Bvs)PXP_FkZXQTtjE_&B{P`)^2Ql9oB zlO|?lqzA6A4-Q;fSO*~KulJA;RcOF?czC==&&UXXq499!yRgm()Zdu-9=(!jl~G{( zwW?-qCDX!;hu$N#L9z_LgfwS;K#!L_Q8l1xt6T!OM)UDl@ z7nZ&+qmcNp_4|8z@JAAl&9%IT2fIywPs9HQbN1ientFOlf{-I9b+?&4JU9q*6YJCih2CU76+z5L@d2a++$*a4NFwq#-*WX|e=&=LG^jdGS!9@Xl( z05!Y5E|Rtw(-XdF&EU;tuR76HP7IzijJZFGEQ6#duHdAzkBetr?1Q`O+Ja@3zK0ub zah!x`xr<9LWm5_$>%0GK!~?$)6D5tZ=p z+BXm67XXIv@bEu7!U-=TeP2&KVuU_4kaGYytvtP+001<8Pp^AS0APFja7_dN8y=qL z1OQ;Xed{{_0Q^tU1^|Hg_U+qN=ne?lry~Hs*x$eE383+KWA^}jo4xWZ{j`L1~ zcjuj7gfZ)Ua^FPbtS4u8dNE4Ulg-m$fAWRJ4WLyoq67{MVB9nZ2KUrN??EEY9q^Xs z%R8<=50_Z5nU}{%n5Qn!x1MI&TcOSru#a{r%mu^3%A%n&gQ^?38YJ=uC>2vy`mk@W zKUPPb%;<>0zx_D0nu0t@i9erTyBtDOFEas&L#5Io1F%E}qXZIh5{#4tNs_l8L<{2b zYk@<#u5z*&5>ZpaWO1w>Xu%osZjqsp$ef^NA`Zl>ko>d*8KG2$>jvqB;vl_BF*EjD zOP4F&#W(LclBsv+L0;1N1kOnLW?_xgL{ub#(eK|-L@S*aDw~f>CuqdK*W}V+#4JML zC{~~Vjp}4U*coqjR5U(`(p)D>kFpfYQ%PE5(*s6TDWzBS#m6VBq#@n8x zBo>;Q#h)Q$y=F@i@E!CtZU|;6lzS(OCY%rZat|M}u<1AVGu@U?@h!*TaGr7>)l~Gw zQe;1IFm=fAo7*$0!2smZ*gJeXUWj~pzek&gCZ;GYL znF>boR0|}lHq;xrpF4eJQ@zf$8utA<9&h}NXzY;NexC#X%}!u}UU15X>FY4v!w*S) zr_$z);$pL6U0$;x6?@WF=j{m=9;W@O)LQ74hnc0ZvvHj5hmi=Dp3H1aQ*^V9icL;P zllAy-5W3r!Dg5y5hsZawUL0w(oY)B_&Bj^*c^ziGp(s zj$A(K>3I$kKw~pIk~k_QoD2YfIzAoGbNi4aSYv(yk^wH7T?l;~__cyB-B4_B#gQ$g zFmfUE8CMuYgAG!rQmu!pzQX=+J+S9atjb@tXERCt77M>P^3zN1??dTuuCWd4>Y*Dr?k5t@dEyBTf?sp%7cyIm)lrqgG9~BnJ{vzBFHU@_qt0|Tg z_Y)h9B=Jh>JndDa$*L75BSbN&SF^2J`Veve{Hh8>^_amLQ8*=jOtuvHMJ4`%C@2vO z3`NveTdL0DRjmMSo}KtGLg_7}iHWredJhi(f<*}?0DdDU1Yt?GzU&c zM=P*lYT)d4QCoyter|>X_K8Z~@ zKJ3@|IWjbmD|L6$LnHV{@I~G~MMuKJ{nq#QKcO{-yF00Ow;S@-f*}Z7Ktk@iog{Mq zCNQLI8R9E^^8%(n2LE~h`*;8Ke*R>0uk*+0XYJN#XPjC5_R>9T0+tn8Xq=vGGIF&4 z3$H{%6?R5E?XP-?0@S(}E&`=@?zMl8e5hPpj+b>PS=hyEvt{b%j zdT&%dj8buuTt9OCVOt@K@@1akXWO3vz`0Bprpb&n0RYT8`zg{F0E#2)>)li>u}I1R z7pIb$Yyj5qrQgmU(t`CtTzE7y#=Cps9HHcviPUkJ#L3GqnNplQgXhq6hg2;%R-Boytk>P=kz(DO8@2uQZMK=JQt_ z8}!(Tyey@tRNigq)9QF%5xKckcmO2NUxIRE7Yrnli^rXBad$km+~4`X*POul3?Ip+ z-~^QUO>+>Jgo+vqJSbqYzH#qoZ7DT9G! 
zvJ8M3N~~Sq=6oMf&}Pq&WKdGvi$t)fyeF=@W~#N3&JQ8S6PkP>X07F#f}iO|tZYf` z@J>?uLbnI7C8oXDU9rHzb(`nH$M;}6$o4&?yT-iI{$b2v$}Fq%u0cO0MxSRg z=V{g5H_|mCb^}96H{VJIiC_E4E;ppa$n8YeNo$##of z1W>ad;)rC-h0-oKHAFKE3h0xBOL&hmHhQ$0UR)6wcZ`fvAFVDCu$M(kt-2sp5EHpF z+eGV+3WKmQyVTfFS@I#X(q`YnLkpRbsKSm(!wIrjBHE#p% zURJl#Mut{L9^>RcFREnb;yMM#LHH6&%xm#)a)Sndcb1%fm(b)qvewz?z=>N5>anHHPET)I!rCeh)&I4ieO8D<^fYnGw z-*Gc-8cv?P^wxP5OT8*jLrqaA!}}bPX>XNi%#eqbFab0WfAo>+f}zX9<6uuGx~dlt z<>Odq41Asr4%I(y#OwBOL5k>xtP9F4C_>Gy`God}rb z!+Rmr z@47nK7I+GfT+RRBp%GQPElD(Rn9HI^(Ri_|CSf{i)6lBZ+Gt}VsjfGfTYW`$wka=E zXfr=$f(!GOif;^QjT!Nh%I4hUvEmV+9Q_Q+l|4<9bUd!cHW?x{nIu#emnSq`J#F{F zf#kg9FN|P_L(b2y9q!Lx@%kcVf!9bMTc;I(oARuxhtoeYs27CGSi&{Cq9`!_lMf!J zHmxUL#o~vV?_%_cPX1QVi+VWN)q>p7GJ1*X)wd0dW?e3`IwkfI5St7 z2$$SEnaVwAE_?orSyV;oNFtH3qJWml8oFc8HmP4M;zgElp4c({3 z)Pxj$+hSe%o7#DDnC3Icg*zU+n@<+V-kO=5cNDl?#Xqxm9rdy2AK#d3&kD3QjLX{Z zS3Sq2%pN7CLLWc=S;}60_*(DD^XVG02=>=G-?g)6D=clZuQh~eMTWCr_@V?edF9iU zLAAEcqd(1sYm`*^u~m{5o5>tMP1_MNppm%mO$upBeK_Jwy&N?b0HQ`wmh^~9iPo*+ zLiqmfiN;dwU`+qiLmJS@4+SsC{jS$$skZPEGvSBN|B{OwHOA5u06Ss_iqRI9&r6SX zi8kE2l+nTCafMIZaOBd7{{9Ak%=AlODH;tGzjWWajtHM*tODzVK&8j8n8=YQk=E~| zU?0&%B&KO?rfd_p)G*FwslB^u%aaQ-_}y_Wcl@dC*ab~WWjl6&#B{EMZJ)$R3a3Je zwfR0u4RaDGt}+>M2o?@A-?Wo3fsO;8kUn17nID)18_uEVh47-lxLK8@nG}aiC7*BO z`i!!&9QBad;%Q0T@XhVd`s#V7ie4ASntaX3!8r0t6|ca3M;Tkw>X1q8e>tzZQuisu zq5I{~m)ZZt4|2bIxSIQ}#1bdhpKR`+rV6j;4>~JB58IwocKSmB((R~S(bfHJMWHB} zyqG8@6=*6vAuT7cxT^*(DMxgE)aeIgp8lN{@@|WS(_n`wF0i$=@v}T;qaN!p#WEn09 z?|!9dW^1D%L@N`J548%4$V&yPS}D7K#1!Od7;X0zbec3T5>kV+&?590bQ4$Jf=yS6 zK;@87o;%>?!B5^5KCAeVN;R-!6n&fh6!}2vv(q;|;`QjX``xoD&t0ob$jrruecO~; zO@~XD_k2OmZE2!kEBcO=e^VCYAG3le^8BA5%aC`w%<Xf@s=2smq<6x@b?J&4ggt-g0GAE4QZ8{$L) z<0hg+B#Wkeg@{wV_pzL6!cnq0;->^(=2BAP&RN{<4|FVvf=Q3SlJ-X0m?_H@RD>TW zXf8U^4U4fm)2>^icon$~ROMgT>0qey3{p^q*mz`1v}V)tjYhI6JBtKh-Fi$1>9jr& zV_3O3CJgj+*{H@%;~|&e># zN*_CM6M<%{#9)Ms_-zKDlG|IjX#r`5*IgL*B-AaY$l1`kk`Hey+6gEY>@n=`rV~ zIr(G@H;v>~+rqlp%g`tSM}Sjo!zV|F7)&G@50oV4tDo)qD-+7Yn?hzu!Kr)|qLfpE 
zrQTN{c_zV~+GRiyE+XBpkHi#ANcm;2_WQS;{;5=(6bW4|l6!8dwX{r?44!$WVEk${ zg2b?vT^Prpa{d7H;SC64yg0m!_ML9l>1^mAu%%Z^w9E`9vAfAbK(1#3cqbhhjNz?Z zq+V>O_r`Gl*4n zM)mz5MPP@nj_XUvo$%d>(nt2y#_KxW?P=0;8S%$RnFjY;WM?)3$oEmos0I(is+_)W z4+*`-bAtv(mtU^c#OYcR7*CtOa?gJOsedOhGa;G)@5?*IDWer(^zN8YW0BCP2GF)P zm)BBBFvKz#ji{$cToD6uXCUAMo#h4S`eQ?^Vxv&m$n{#vDxHlac$ z!S4cXdWzkTnqZ8;bA4TL0p6VFbHR51LB488NJaa3RykOo9p~Lcm@_61$HAN2bK8MtH{P37{;nh2_F#I}z@kAfdKmM*Aep~!(plnB4GcB(o?mxz@_c}u-d?d}aDu=&;xDttME`eoXq{TTb8E#S zh~uN7-=(Iw3d+Nd z(WVPAm;G>WzE1U0A5CwGp zXl|=%9Ym-%oDm^Dynwb4E#9K_p3M(o;+%0!X=9S05DJ3 zOp4nF$yTzQj-f#}C5@_a+NEC!%f#X@4!5hNhA1MY1>9FkVV)8ZBjSjPLaPL#XI6Op z*5^FsQ!0O8;G+|cqj7)stQ+=e?#*q%w;IQPZ|-?S7dd2WrAG1ZUU+|4_)o_=)SLZ( zy|f+1DqIT8X4ZP0X1DJ)d>ai)sgmMrCA3#H@4?!p|E8$LJ0pSFvHQ0w^sDjb55iSh zZsFXDjvhF@S6tk2lJdjGCI?ln1L|=+;PG!VS(K8(_!c9jCEZ@yi1tEg&FNpcg}#T1 zBh`ee;kbBbSx0K~Q}42#C}#~hT%g%qe4pJptdcDd`mNZsaXJ+2R)Hs$W>~5VPqq1{ zV7f9YI9OSvtgdy)J6_{k>Td>V%)`s3#X)Q}-fsaR7CsE}5|K!u(&R)kqVWewwFXY~d9i|G@qSY|p22Jr9d2wWU4C8|GufCT$oV}7`yLMwh*%StHN%{0 zgo_Z6Z`6)LM`s)Oob@;TCNV2tT}>bMM9_nT|B!F$kFIm8H&3!60+%LrR*??64_@7b zny)#Dl)|G!g(YnCR8J>QRbnpseG!3jl!1tdBM=|z$HpV`km*~<*YNJ=bI2J)_|J6q z<|bpp*zWt&=D6WcE1tS44x4%xf-!KLVbmFGI9x1&*&jt1e%8jrfGZ9pklzkwOYl6c zmfJA6uvzMbo2mbhPh?F4L~a#?T3>+wpLn zPs5ILyfMK`o5-0nIpk{iGq{(G!>u@r%v?)t}9IAfJz-b*BTTajG*7{fNkOv*O%6hyhIs zFcY$CLQ_^8iP2%2Z%|sPMvit=rKpw(3$7c0l>z&G;q)*nur+`;>*rSvnA&p{JEEU} z3Qh7S&(kaZ+`CEo@1$Y~k9fa-(fb`WQb~dRpB;u#o8j%V|LX0F<@9j(HZ($?gG0}w z;{Ch04zBg*_j;$@&YVPTJG5;ZDr1P>jqHJt3TE)JUUn>O&?{0;5J~zNs(?JmK1;7eu(Y7O zAUB_O<|16us8GO86yiaa7aN@RijL$ojeO2E?W8=0yEJ8R?puO&H`_3|A*5(irzMHi zsDsW@Eky(S#1-3PKYi2rzh+zI*9I|}`28{D)U)U2_WdQPexnx3l(!ge(n|~oEKQ#< zd$Y4lc`EzTXoWk6-(u~9IU1xrc%P=4F?`nSO8O>IdZ;WKkRMtdR#uDntuDB}V^!3B zy8jSDoFS^#tr?E%7ZvqN9?q}+E-FUI>Cp6^0EI`8qUy=2m3@$3Gg zSIy3m;sw&w-rf%!v|P*@L!GfCY3*x9M3psEDeY^KL5xO$Y16SG8gwjD)#CYE4coS5 zU+GlIb*wPTc;X6kCxKhIius*UI;+}ppIvg3BbmasMK%Sy^K!n~v<7@ChJ8i_r@`Vv zTJnR7kS~?YWW0z6e=2_y{|P?R-ld%+J0R3iSKeQ*qBVkgh8?d9n 
z`iYzv#7*7p;{A91gz36HoUavLap-NGZC?4PX#e~?aE##o`Xn9h1(IRm0I`&JUJew6RvwE_Q_q7H7WWek9}E-dW$%rAE+$o>h>u zL!^fCB*v?4cR*d6_bMW{&$8CQDnlrRG^6ciNyGDskLLJ(8|7{~}R<+~Rj z7d?$-YDu-H4oRodTGPC#xZc^%(1Y7+53QOLV9fDO*T}&4^2!|l`M{t%1<)IQZ z`YHL>WH*uw^0$$lOq5_&{%pt z(epY>k-;0vpK13sis^-^PUSu7{ADDf*`@$?mcDqxIGnT$3S~lkQlDQr0hlv#ZUcL< z!R3mzL%(T@h9ZI5J$q-{XT6%9M^1FL)+P1sxjB;sB=3sF2GxTD{xvN)54t%n>&~nD zbzcW*xrdCu-#+ve=(?SpmpzkN3c)OGC+1t_#`b7iqp?36y^4XyCx2@s#V7X?F6b#Vk$t|>@;CF3%>K)pnKLb*b|TbCyNX}d4E zpUw4v&^nZ(r!by&Fy@oD(oY$3L~gvKz3)!e9Nb$ipXtI`X1|Q#rzdR4%NFzOa*$x< zCZ?i~BvCNfn-)lJ>#!)Zn4$xQ}IFRRB73|IMigUYggs_W27D`NmI{>y_Q!!3^`i@W|1%Uj6!@CU*3 zv+50vD(*Tjw+3b72*-22@l^qIdwH}fpxc1wcf|J(j_S!XZ zbBvWjuFeB7%V*v{OzeGf9L*(u77GR=B6_%_c|!1TkJuzCU}V`w;<93~gkmo8>q@_2 z(f3v}KUz4Lok2jYMHr)q2rn-rmr@jeuIKNH^-MjRoSHV&Ug}y$P{;KfGKj4P;fapV zW^%6slG1(5Aq+NB`d3c>g{V0flRb z`7|T;%NB5^2UWl&Q)Rtf z(PvjJ^lXovRV{Xsz2gP;IKhxxiHMD)5-Gv)AD=dAd-L3V0a^7r%&UOz;m$9IKiCc% z`uREhut?eHsHz<}0>N$Zb&;f1I(`8c(Pv6@xPJ)itaZcHCAY(W<*kQQi*R43&={?c3a)ldTZhm?`632IJ{&XuebWBkE9spoy<8>NknHRj2Y z8h3cA{RYd-pde|#rXG=R!dZZK6*TUIO+)a!;|AB*rk+1*Q@OpBU1-Ii6}7GpZBc_DXi-pY*ijG zJHBxH0SRerT!otoaVqP>sV_l;X?TsXW&x&+zOiz@dNkLn$Sq|agT`sLBga|q5_}ne z6otol%o5gxT)LFf&XN4CM*7hfcd5ytvc)X{|2nWM0r{>Pi6Fsp?&O5f%-TMcm*kx4 z@#E&!QSr}(*T@XDE=bt08?y2Q+0gC2nTE9eg=~8HXL5-^0XZqUSh4Birj+hJvUKdN4Gg-GLbMz4 zW7m()Y$VVecd8*OLOK<$Mko#b_pK%m`Nv~YgEvtwE+c%ELC$$%nj7dr)7xLuUEb8; zBzVpH8ZnC+Bk1Yc)CXa_CwDRwSgz?mjDIIsBd^X7iyfm{OkQppc*t5AP^?+ruif*D4} zZ>jX%JkY^AtXc5FkzZJ?9d;$m9hZmOkwWL<48`ld<5*0p8t3%YU*tJFPK{>e3bUO( z{Kv%PkNyv~5c`*|Zii^dli~f#b>0uzrx(cO@Vl;edY2n(Z=SII5X^_K!+92ArzC1Y z>TPeCCN;@D?bMOzIZf*P3=F))ncU3e_-0C#fs`#dpKOsm@XWrAZS2z5hY`=uVK9!W z{CPg!|M_LwOS0|kkj~xMi)YTN1|x+4dzm3jra$#VC!| zyc}wh0GzRX-w#wFY%x4jQZ&jK<0OWH4=WyZjHu3AH%=*yGIINHn2ne>8YsLN=XsRs z{47s_U9^V7Xv^FVW4qysCYa82tOTevkE|7&O{tRG9lU~JWRI$B)si`63z8HgQUbB=wVt@mj5fQh($yh0J;9-H(# zI^P^%8;{&&7#e=dBeY(@+#G7|qEX>sc`BWnS2I!h${fxZ4Wp_X+`=Re-}cexO)z|x zuy<+Q%~TH8uPA!(?hvSR(DM?SFEt6OUZ{pw0n1Hr;Zy0wf>Z-Lw29kFOL@=s?5H{= 
z7$g~f_7H-dS7UI*MT}*E1OQf55x3us`5a$DP&;Gqj-`uua@68lXNTj_94D)W+dt81 zV)F{JWl4Tc)CvqCScgPz88OF~#I^$da{cpz^d{KwS?Q(o`tr&55VG>(e|_jL*}Zyr zvsiUrvv1pNo$;Gq zzEqbSefp5;+9Zxy5zG)G2=q;N+E%1XPZ4KW=``aj38`Suj0DhmiU=ktyKZcSx4EkA z&`1ztRbP`XAvX?-Gi+K$C`fv~SoUK#Q+4uiLXl9|X<*{4XhKTu@qlC@H=P-eOlgL= z!VbBlB=*P=#{+}cH*>j?Qe`ut*yz9xQIIajQ>S_`SC5L-MAYCOh9teVs)i%KI)lrs zlG`<(Cfaviwf%Zh7BvZA?zg00Q8J}z6|FDF$O2J7BelUC+NNYmR1ZRU{F9mQ9Q)5ynm#uZ15UU&yvtB#!(1 zh-n?^dMdFdS-_-$HLM4_3W=niq-Mr~H31y9159W9}gMR9~!_^wxtv4Ur z@kH+`PBWb?LxaqF-;-p?qllwEOoe@SJpSvj)cxT77WOT@%wIioCfNq=6gV_-U3Aib zbRbu^)3g8abzt3yrO!V$W=4wbd+|2}e}tUxb`RHeLZ1BV!0SyX-M^++4?KU2PCeW^ zEcx2I4XLLa?%aji<{qn3t>Cb|?=j>O!&8%2um#Ly;^=7<$9zR0beHN)Zxo{{XV&P< z2Mlx0AhBMbX$mo2SIv$zC+@`HmFlD0tHE z&X}k6MB@c9D9Yq`B5XN_a|}N~8Vp&o0gn8BNj2`^gucXpx-2tt1kAP{@3Wuq)n)tU zc{ij9VtDg7Em(K`G<|-}-EHrY?KA!sF~#wndtVb#QocZa=rbjon??|6b4}*o*m#sO zeikbA`C=RVp~=-1?~Jr$Obm=D73M3_U0AT9443BiiF7Q-PaaL>UVc8%5pMj=AioqR z>g-jgNuxH}+b?lqbFH(8p{#v*+X)5zRv~kokyHIRYc`RNgnQ!7&`rMY6mni`;g=cbkQ(@WG+$3ezV&YkwVB#DM$LNj=yG!y5`#B%@X4GxP-v-E226D^a3-oT@cSN=rJ)!S5Wd^q&8*07RE6 z(MTEMvwa)Us?O5+@qR!;U`_S%Ief^{Sb#t_7vCg?GFEmiC;+Cg%~WW}WWJSFX7(Y- z65LtT`15X$9jga$!Bs;+QXC@zRL+}*koupVethwO#BV;{*KGUXw#)xqtKuGSuD(1Y2FA(AFaS;;30pX;V{sV!(19h+dJ(#>^+{1=NQ+>x@c zFkyLEj1i=l%cW|e8f$_5Zh<&Z2a;F%Bxu_6i^#~NmB5NB9sYOH_? 
zYf*~kM1po_efM(u;J(cz;}2p()r&S%Pwm^0x{x%R*TN|>A^2z$$KHG zx|CyNn}slJ!}09>t9m9}O394?8cWRP>actmacdk2GErahHz}io0xn?& z{@2f(uv{(oXmnFbus^_gMRNu^ez0Q)C7E1Xz)g|q;XashN2o=s_bAY(q^SVqN}JFm zp2gcmkEaL7L$~3J*X^&`?(rwjb^b%|(~Wjw^*Dss*zqHeonK(QGY=JF{@g?&5I@-O zkvygm_lgh3$vjHX@_6rbheZdUSpslsk2&C6Uz^|=Y|d5Qm-@t6he~D=Uk+n=hJM{f z-cJNT^CtO0TDT8)kwlMc{_NzfMrh*gB@d0CO!!H}3Bw}%=CH@9)rGRi{OZJ-L@u=3 zJr0sdu5@L9y0o!V*pV#JTbRK(d2o3baf8LKwD#eK`3!`yLWfz!fwma$>@ZBEMkLBWB4Zu2P9Qo zYqgRJ)bmfYdaw0EPrH#6#!ZhGT;u>jZMP-gKIh^ZOX5P}BHXNvmp0S2HWZw2O%OSV zT=pn1G4LaYG)hyJt)o;86xr;mOdsX=#G;V`@e`$chp`6Dq#Zg_#nFFI3ui9(CgK;5 zm8QI3%&d>RDqxHlNbW~4`CnHLCKA-JzI`Br;cecF@UQ#bA=^t~@c9dvJ8!eAOZafO z`*JtV=la~|Yr{6Zj3>DqE$OMMPuNR0ZXEviNvtIIE?9ISgm>5Q5;pGsd@)7WPwd#V zahoh5dehWX(0}l^kUx;H3@%}j|tCP_etf!GMY>0ezwJEv=-uw zK;h)1$$;kVEez66`GiTO+<%F(7_n zo0AgzjKY`u5V#pnDyK@^Am68;CGi^50KT(zhD1!3tdmQ{LtFN!?mHSZMMHcHXGnK zJ0;Qm`B`<$iqWL!Z+@>DzniO~KPg{r4a~G$65bO&GU*%;j!$ra0jE!oDu$s4o;6A* z>LqWFB+#=wj6(xLVTQ7ZMPWd%{CUOSRW86h65*K65%uFXV-I!ND%MF(KuolK5QT3^ z70szct@2n4^6rJkqs52l&H2g^7d;vHXoqxjs&|(q!3xo1Qiu$d%{JaI_M3)i1`2{= zne)S=5H3uuzMz5-x@?_UOiCUR|f9qvN@`2RUPT>X5z_*i#5 z-MP_X0A7pQGAppA(|r>AltZ;7k)|9*Y(BEBNYxOqmoCrK^pq+d!jP3Joh#HO=C&;m z%Tl`5MR|l*v9<4UpLPD23W^oDW>DUBz7s>s&z{L%J{XS z{9BOxGDY%C2No>1LKhmGpZzNCtD#wqbzp-of1SnMmvit3kK}Hm;R1-s=XzWWzd1B6 zg>W1?fj!4Wu48eOR+WY%evHlsk}RQFIoABI&!4Kt2=f6h&Iu~zXDtkg22*ODc9%}c z%xb#G5yPah@f_M5jUz~UjPJ6OZ>G+N{nseRT0&Y2Wg5&RxLg`NZ)kjImm;j-q=fs- zb)0J1_xJV&X=@3>rBKFlupci`Ffj&)KF|?53Fd6(RE4eSw2OZBQGBD@rdyw2<6It0 zw(0T&!N4Is#nHE{qW%*+DdVH_eCCU7*ru>?SqXW%~BIXJjOlWD%#xm%e* zIbdaesp%B}K(zz_P*es0LUU?f3;w15&k0odf1Qi`TkrqPVE_P_Kv!pDTWGPKtL;C1 z0{^Gae_Mn0`#%Kyr;GoqpMPuqTTSF&;XfS1)zaAIUlq(7J2Puz=oy8b+5c;n)qu~? 
zydNlWTA6)u{zrlyTpZl&|CL|4xP!Afg@QS>B??hDGiwK^3i#d5+TH>xV7l1-r{UL% zP@;jpNPN|knmL=l@>hDD0GgZg7mEMLz^^W@CSRbkjf<sNn&G zF0QWs8s)WRW~c+aUO@^Sf$6INrC0rbmHoqepk-hGDf!O({ZmxSGD+s#kiQ7Ymdr zyP3JZj#Jvs{FVPhRR8J0M#Gs854J?vnhw+dqdX;RL`L^`Ouh4(e-oFQ(@@Uqr#;{-Fn{8I>fTT--p|% z{70)yV#6On2tA@0Cqu$eN-Z_n%FvF8;XKNZ?VbCCAk?dPiL`bF3{lpwmX!T~eTLJ) z9UHOm9&0+%e(Bdq2VhY&km%+PIDmBzfs@=iBIs_3Jb*Kw>rncRzC2tiJMn<5Bwh?# zHO4jBZ_q^2ijZom-3BvD;kO`{Xwdlhd%MI1~2+FZk(xo`iZE zhoYRM;|@8>i^YQ5uFIl8%k;mKM)ywJ^p*TweG&1Ktvx~9>GWtX3X~` zJ4%liX}j(@Induw&jZ!bc){DnK2qi+@GW5k+G`6ybReC8TSxP4Bx!5hwfR${A@ zj=hKO6mW}8Y377P=BNCgM>!C(#gv+M`3Sd-ty_|ie5i%ui?bwb(Q~9>XI49XiLZJQ zXbyuFd27=mwlj15iVVs`J0#wp3aXSCvK@>d+lDz8d0f6{^R#DFw>^%^%t0TkK0E8O zl)NpY<~5bYgMG}4(KwqcaBv{{(YS+)J4O0;DNA_5!U*w-KbXGnSl>Psb|Yr&FpW< zQQ?3(k?0@|vT2yUheKs@G%u+bRj4o9T+;c|>5_($2G{9=+GLL$(A{yH{mavFk>n69 z(sG9E-fZ@seZf6ZFmXa~@3s_#0F0M*i~6lCUzh` z5GR<%Ww{K|hKckkRQyuSAhbnFOVzRj=tc@9nJ-8S}IUq3!* z-@y^*KYd-zZbWAby$M*5)293?J)~fVH(6=wZYALsQ(95V_a5_m8u!~}@StL*dU7n| zQk|~rr>l zNM;U(I9PrVRq%Uqto1;;+Ndm^8C-&Rb=I67b||CvLn_)xtl9uIFD7d$WnyxXS&%tK zFtS*tdVC&CM?z98rBIfK79k%<>jjrn?`fRRF<@$t8OQatHnlnkwGLp9C^J+p?#wLTvhXA32393j zf6NLZO#L9f+Ev(e=3GuGk*-H-6Eom{zpML+BfN5mw)D+#3Hir1JsyiMsd7&4EiP~4 z*@$xWICXC#LuJ@fAt)5z)$l+ae>_g9X{zODEJ5~fGtj;2IU6yl%xnbJcuL@?!-Q{Q2|nY&P%8Lf_mmKY{Oh2miL#)(Jy@i%l6v-`_v6^gOOV zibCst-#z8m{k}fkg^n(fU%CYVG!SQ|6BNyH@F#SnLc@MK;4=$<9KXykbW+gPmX>$n zm-R5Mr*+S)Gy+tkzL%%5hp?@dx;Ugnw?RPy7dvrC<~u4=iM)%9v~Ox!y7PYT+@opM>C>wRdu0relM;i5R+%iWyj&Fh#uz3)V-u)l(f!JtLb}{Q9AM=Q|u%)=|VaJRKh;mxg<&Moog5Iar3gIPr-B^ z&cxH>@3|3NbTxEEa7eGt>u;cW!hsL#HIJSGU>x~ay+w+f-`~=6c7p#V^SID0alyah}FS=Yucu z+o+~!rYK1cXaRuZUoKwLM_{w5%)mjfO01TvyR*Lm0H~n>sS<1zSf=kGNIDF-<$WsJ zym=8cAL~aT00>p>5boVeXW{bP@jf*oK(T3>*dPkcV#1vWb*LIK6E+>6#863LN%oDW zSxjKcMam!&y~PAj)rlU63>@)vbl+jB!Djf}M>9$@bo&rcP;H`>+u70E1-8fzyD@+y+BfhD4pNqb;x>&t$3&DYI0gI{YOx;<@0=vsc-hMcb^ZitbKh7i(9Q=^<8PF2zksoE*GHt1|9J2PclbW!1h z6DhAHQTQIE@EQ-RnbyVpJB~Oip2l~dokTh4-#6%GKO+^Zjp!=fwNkE+%kQo&l2Wp0 
z4Lm$exgwLrWn?;|bQ4pF5EL7f)$yo5qtXtHeV{>5DNQSVXX_m!p}-!+2uICK%OFXY zGk`OETCtt&#;YUvKL61Fdb+3hr(xCmk;Y)hoJ3$;wXECXY;2f?|1F(JYTh|1>wc6O zk*uE)y`<3Y;iVSkjM{3q%el7A7u_gEd8#AVP?^ubXw$WPfroKwT*HBNC zMx{+n5=SMX_>~HIQ%*}|K63-L-P_y!_M@$Z#ojcYXp=Xt4@8s`zs9@uh#QdAE1&OL zds)>}2Q7hSOs$GuUqQEzkDp#+1?GqyUH7(Goq3=TQZW6X^Qc(~r6#6j)V^7MHbmD+!&Z|OsOdQCNpVw!;z2%Y?|cd zRm!?Bd>Lj6sf1SKz-`Xcp`ju0@bJ9OA8u9mBqb$+f`ZDj9KbO5W{I^nOY@E_?42~r zB=zT%V&lq40Kve;%?_MMO;DR;K0Z^VuuY}Ei9f>d-a*9~Ee$2uM)*t~EeI`fJiK4H zlu8v&7LVH9Ts1LLhF{hD_1j2uURsAfhWNpqgB=de+#IhvE|hQis(Sec8SxwgR2X)@ z`5$Mf)yS2}JER?HBo!zZ@=d)QXKfh~b`>@$=Vnf7_X+K*{fzi&v2jSFu-&q2in4@o zgo!1El(Dp%6fe1HI}>b^^Ihf>Wo1zyrOA1c4DQ()Z=|!a;O#9s>0<7F%B#LjqBG;d z^axF77wL}Zw*>cjvFL*eH$-GbZX3Swpba_~GOTnJ?c5^Yn(ax_&f(Fz#M!f$28{s~{J3!~ zDp_1o;?ejxV`}WAEP7&#tra>F36xh+2C`yd)Zla#whSiMmY5b)O5^4tgw7ztYXY^`1~WF7D;|sBwIbgID^)m@-(>>66P}REHM(?N2H21K}5`!;|{a72~DlNeOe` zUn$c5n%XIkkcp_IH`&ROE=d)sgjw#N(@dBcWcS;A1+K@Y=hp!0b0TgY2yO+HQ9)+};4 zFc*x1SlZU!qy_+h;p|u?6%pOJQX8oQ^{s`!^Lqk-s~F9sjC29&pnpum&CLy1S#fjc zc@Itj=TqE%N#<|MH#MD3Toc3N;rq#bjf<}e+2}v$;|w0qx@ho$Uqw91>DlJR+)hMe zCeS#m%~_o<{^>?}%cnPR5qA33`dZ`?=M&Dlw_&U4v+Rm?u?>?f_TQUr zyuG?H1ctiFv*9~JI-0F`Zu`Hv7d3sR0r-B%;7*BLo1%y!ja{GD28`1}X2C4!=Tx{d z_boPbvLbEYdRVSseyc(^r-olX)qi<^lZPgQPr7LOeUaC9B>>xJk-0BMf*`Gb1H}2U z?2ynLSpVjD^y-|jwRgMM_CuYhJ9Odlu{S?IL3PhKf8nt^@Ui@Ew`cC*^dWHSzFF)k zV|@=Aco*0URe|2a@>Y%2;B$NEhmEJghk!K*r?wkizd^E zHCT3_^lNwp02bp$I;Bug=p7(K#@iddqOUsA*2k zxUX7nDONubTz{WFFkbX>8v7?#eDBPz8U<%DU82Xg;ip%*ie2b)f)QsC@LS7JFsr6*o&&D8NEN z11hhCW5GixD2bK{Lci;^{X_mSTRv9IVVm6F5^rxNH0fdX~A~XvNK8c(=6(E1<_yR@FcYdJux}g zDd5F=yXY~7!88aX@wb86+qUm&+Gc2Io`{IN zo8nxt=4gMP)YIu!)}uRfJidRH_$pAXRkB~exqcR$&WoQB7&|D|4mv&Q4fK+NVkQLo zBc7ft`#+BP`SIrmx=>IcsrI(NNC;tAUwsi$hZTnCiHCO;9NZmb03zaJ!+1v(KU7@b zW*1G0o;+Yd5!kwZbp6|V^qc?ggZJh6Q?5nb@%5vNp+hrN_5P;`-}?e@Ld&DMF`lz1 z@W<|WhSU(29zKz+X&hkSn|l z1u%`UV@aNr7jd_o%iTpFfSgf=c$UW8k1>+VV+!}5Mbxb5pj@T7ojZjY)aOwFAZe$< z>8kK5-k{3ajhM;=y+;FpATJ;+Qo~u?v!SH1bZ|LV 
zpG!Qnsj5=+@nC1hT;uY!6ZPB?$I4pUfU(8SEQ}i!B9XY5%8QQ~Hf}0GlmMFo@5KU$ zN4rJ4p7PcmTOaNpKfqSkk3L8AstnrhW)5qMhLMQ>u(!U@ z8Sw^yyzSpRhX85Fkj&cJT3iNGTwgL8a?m0^%dnpd8?*d#3joqhRO+CYnBaQUb689q zz;HraSmc=N>3LI|R6d{C^W#t;P~zo3oUzcvk1~8FMusVt&{fG)>%n$s-F@Kkm%01L-W3*q^Mt3~ z>#Iu$);vIy8j-XcW@!C!CwZ7Tx*~9lktQN%pfF5+KMV^=1s##Bk1DPoLcI450sX~0 zKsdLQ$HP8>(lu8-K(D~!J{h-wUp{Hh${fsRW(q=ecLj+9@Z%&loIjkE@tOBF=F1G~ zB~o5u>8QNAItOLne^Uw5)u)=SAYf8R`skx=qd@HXQ{0rDC5)!bq8>EN`B}VC9N_D) zH_Wn1Fx=g*V_C~}9DY)2IBLhK+SyK-c81idDizf-L&`00J)!4iBtIQJ`~9j)=5ILm zyp|4a!%o}2d?t#S8%s5>SoK-qbKl<|&OCCY5bD50k0XCm8y0rp}oUR9;a956+Lc!fR3)$)h+jG z@0@2GK8f9n7+gXr9u9NQHy5D8(ENNd$m{#6Uh+my?(TjlTHQ63V-W3b#ZpI1(l$LIv?f5ZbE z55~9mv;}GupNk~ro#DU`LvfN_PNjO97|#C?Gwlz6j;UZPSw8l^N_g=@MW2~o+}`{5 z?)|wr3eQl@^B<@rX98j@&ffNjT?2J&q>7rzywqP zOy%2?G{+KMz*_X958#0(7k>3&cm7`Nm#^phIpyEpUGBXshm<(GUOD<*P5=N(Ru8?W z1DM+e0Xg}ua6T3Ok8SfY#7;z5uMLkw2kd6@Bvi5bD+!pwbKxc!h|cG-|H4Xuwa0A0>bP2s=r* zx`*cZoYsjsyehrE3tVVEz5VFEU9$y!?7ckfk$#Z0`;=x<)jF0}U8JND+KI;L4EEzD zF8Lh;_YN406o^_N$?!p;W2`nIvL?Vdtp6IJR#24()CRv`UnQh%5?7@nj!;W8Ow4vz znnV315U+|i3&0`D`U^a;h((5HnVBB@{n%O9h^}Bg<>geRfY#Z#kk44f3A@*~&DvRg z!Ir?3{U|``A;I$-TR+XlHE8}7$&w{27m@Y$h-X9lW0nB4OH;0l-c+Ol#T;abK<~wo zxSzla=j*`Q%m=U*g`y6Mf0d3#C+>ili?{x$%~g;eAGNFjinD=*#-K<>@=(Rw2Y(Hh zAE=fXe$KLtmSgc%vObz1g$5~5z(~6E2)CBg#@^^2jO9!i zrPzFI4IfC8M+)o9?G#zSNmCE&k3CSw-d2+HsnM%*pVNi9BdDJD9^ZDn<*qYqJ!{lu z0{4pxj0vZHm0f-cpYcJP%W`h@)k~dzg@zrM9H~s3@J9Ufj`0miK9R*A{gTk7dwRa# zVi&%dQws0_aI@y4Qu{N+)GSj9{Tr@N-18f$LNCPu?o%GC)TmtI+k5#l_h}TQy#9!@ z6CHfa&k!#q+LPhzP&F=G25@fl^lzVqwXkaWM4{sOCZ1?gk5r0RqFN!w?y-vysC7Nf!S=N`PQ zCKt6$g&Y7#mLveuwhai==Se}_(4P}j@Xb@@%>cMh;+I(;KzKfCn8%-qJEt>mjOWHBF>zn|?hf!Kfk&?|4S%~^Z& zVnsbbJ$#(k&LLO=P9O|Ewh|U8BpK7BB{FSVw7Bh)y50wWsL!^3Z_~}!%lkmbr+2e^ z@qWj8KkJ}JfmaJ+lrOAZ-bP$jwy9QYFBY*%w+SxkCNa)OT5a1!M%RZ#3QB&JGo4{B z_w^8i)GND^r0c|AzSb};u$2&>L6p;u%wRLmeHdtB7D%#-Y$cPK8Zr8_^4APJ~q}5%Gm>(pBby=>2L$D1M=%& zBl^~Nb$y|?l}YTk@%@{mT-S<;z{mWjRnIYy>l+-#q;Xp&Ta9)@(+2MVTh9#t6n3(> 
zsGEe2U(w4g{+*;tJMdS^%B*-OQoeCbeMf$<1`+}Af<^^zuur+Hs&Jl6<^`NDjIJDyuEdMo+y_1gH@gmhV5!pygt|gU<2?W~*=SYs zWEx~-sn}mK^lCjSK6Gu|vP&Ln6)iud53>n0KF?0m>}#)Q)L97NqL1L|rGMg`b42nb ztHF0PUJBKB-!u7pz5%?@8tM@Y?bl8P06|12p)Y@Vd)g8;>gYusWsE;8`v0Xfr3Apa zz3Db~!dq9YOFf@)W^ru_0IfKikE_%QVFB}_F(=T;;z2$xv`x(^HdD zG@ajnOU(GnM*1gTKH-l0?Z(DeBF?QE5efkzX&a(t!ObTi7*A14k(%b2RzUdhZDri* zf%~e%z}|VwDp;ZB;VL^;b}eaBJayWZ|a_L z>nU_4!MpaNdg0|={xhWsc)&`T1e(5*d<3@1L@iy)IKiipl^^V8Z#twv)VUtjC9-`Z zh0UR_SQAke5hFiw7KR~+W1$AXHN5%#5j7ry$YyOSUf>%?h@oAS!^a7+Amc5hw?`G$ zRAqq=gC7In=Btas%{zGt4Qd-&qQ!)^TbH_jcxaX2gN$tDSndLmy8o(@wcz~FaHLeX zG$8AU7bm^4-17?aAt0DDt~qHvpuc-Cvp%u#X3}II*uMU$9P1?aZSjCrf2`E z@f&MBz>623_vS4SWHK{Cgb77iO${p|J#ZiUyl*a!>$umKri@@A@;g@RuuG|;~txR7#1pBTM3!S6d9H~iXLja2^Cs!Z&*IE>j zAD#ZD%aM4AtMw@lJ^14RHbtp$Al(mt8YMcd(QP<cIQYnH)V69J5R$q8yE$on#&XeB1O2g7oWP^E(gwU*|#MfXQnr za#?jQ?mASm0)_0idqR?&gn46$1mXfe;{eyco0)w}j-pI?>SC=oPZbe$na+wv-o$bX z6n6BQAC7)T0Fu&5G}YfWydTS+na2IW#ifRSwl$?Q+%mS;srb2hFB$?Mdm1By;9#3@ z7MAEing)StMVWxQnPVtLAnb{(K#eQk!PD!&6=;itvA^qR;JcIFN3p|EVZc{7GhG-( z-lKKeAYVVQFL|6>yN)o?ZgED`_0{jLMLoF%f{R0}!VdwG9QCO=1e`3{Dzx?w zrcXjolMk;;V!nh-yH9#h934;LFHvNe3wyl*>e-?umfp3|j z4y|^3ga6~6;jmEYrdp|HE+ry&C=wdydCawaFr*J&#*NaGzjHv zOuIr4{|)142P&Hf2|s~*kMzJ+*f*pkbOaxA#kJV1>iq%S9M)c?j1LFwnzFnvU&cu@ z)A6FQ9pRJ$yuZ(!QC}yf=`R1X>*RQ)qpj}e&Zc-UDp=GulJK$u05F^*U`(@>v4Eq$ zn?4^7!3?o+)zkWu7}V-1kl4sMz|Z0eYCHp-9E>{N>*g~e4}9!>vh)EwG*QVW_vSq& z$vwliKdlZ$Pb!JgfHA?uHLYYA<|HL#5?@|EgnMU!X48}VNNo>T;QrBPT@erf)YVD@OagWWz*`TDl z;ubJj$jcJT>613NW+UhkMWHsKIroeJ&UeC|xe|1*?{g}}UWlFJl9Rh!n4Xf`G0frr zB+6&|gouGr&WT{?po9x&&~E{G*6vVcWmU|yi|f@df9bIoSg}NeR(+qGVK!Ps1%&dv zFOZ5KD-6=rjDf9Vub{`!%_p(ZIQO?b4DsE)+vm_U#HZ1a|XfAy_bGxjLl@W4e`>Qgu#ANf+IQ(muIBGSJY;-&#IRzUpb|WHQQAW|1 zLGmQ|qD?Yxy0ct;rbAWB?@9F2m^BYXKU|9XHs0pX&ArPkA9o)PVPtXIxov^y68kYB zs*Q0MYEnC(Q%rUMR@A4<%az{ggAgIPcL z-c&b{5+CQTSG9LHa4%=`{NeQK(e(&=1U*@Zvl3cgsCoStdKY*U_&(E4J%V#=J09ea zj9tN-Xm%i%)8t-G|HdCRZoW5(Cbmj%F7O;54PJ#nyI*xmJl&6BBwwq}}BNHglrZ?$zYZNWf*I26MqmzP9!=1uvO5T(3LGw#= 
zWful~dT>^FPr|F2T+aS4AlYPscjyQpAT2|i`u@e1fuk?+c3k6out+Xi=rWE!?>y{~ zp^b+(5@kaclO+uUeyX*JUbJH~D-2`6-+4P`fkM;XIeguctwTpOFL519dkWh2+!g*K zdX_Q?s9!rD1V#f_t~X(4m9di$5CH26Qrqw^g@w9ml=!$}#=X$i+lSuUhr?}_EmzO` zz!~WJ&$;zCu6y&&#!u__yM4q=xWY5kOme@A2)sr1t6H>hqwYVcg0(*(k!p#*Xo|E6 z&t$P)u&YGue`6d}@P6kgn0ipSfvzJ)4teU65);vIq*ux6WvkH(MbMGK!Z3!o(70iu z;_w+fJ>qz|l?Vh-?(d)i*XkX*np6#|V2!mY*%ciy%t;Ba^L&RO#WRUhmD+ob;c0xz zw3YetD5gWHRzgUCluB;!@rf2Aj0pqH)^yGNTZjmR?{EZ?I& z5;SKV2$pD1cCLg(#wbcAEx@)W#(@cJ^r14N6j=BDjlf|U&}C0Jpu|Qmv?w_9+aD8u zX53Myj=(aH0h%w=?CT5LuOe-yK%Omo0jV9#$Uqihst@j+$)AC8KwqpsIP`QMZ*{o` zirqkOU%T$*8+q;xPZ8EZW1t@v;r33z76S_1@mY#QXor;2pT~Yx%s0@NECC5{QQO@p zYj_P1Y`MMWa8tk`8hs*HQ=28JNgCSxk#KSpLR`Q*^!^38s2b^R_ER8P2;8egSEcwS zbw=LEQ0ay_Yf1tDR3eY|aPaz9$FCNz*P&ri9hsJAQqh83y=4~Q7}j~KzPDIWhw<>` z(Pi}l$rWku8luZ(Ma=m+%261jczH%s=ty)#8TLuAR!)to=C2W*c?Ug2^_h>ii7iu| zH&_tYbBi%!;UbbAZ&xJ^X&rR&kRYEcrNdv5M02VEyj*b?c6`-v3NbDOl=K5DJ=B+k zGElp4^zS7HrP&%!f89snWl<8C9IF-fD^N!>XQsrxUB)olP8VGk0fAeeD{&BoU34rG2#`}PqXV!_rrwd{8DQsgC3TLKzdi9il|CI0e zc)QIFd$yT>Wd&VGW5E1WQ%V11D9%YHvY0Z0{$qYy{##A<-q!9g(EwU=Brv^DnOb*{4M= zND%qAoSOL@DkR|wcP>tJne8^BjAk`2>4vp;A-u}oe%d6NDP%!UV-ZK~fhcOB0l4hU zCS?quc6~c}X8p#t&QrcaXXb@{#8fvC=-BjM&T%jsWI)fWMQqflZKxge-uSHd;&}P` z_~H2I=>v4|0{ZlD?7o`zNzfB|6?i=*{=(=j@0ZZuwsaeczuNF?D@l>PeNLIYgLPCV zR!=!1Pm0N9rd}ZX;04VV5NUerTQF)&n@_w8+6bvc12~-!Mz_E&cDnJn^|v-%*M3XvHS|$x!^5= zw2g+1WTH2sMU~<0(j%WL{0P8HLE3^`n2(hOtrS<~?BAcs3*@^G_dlw(@1N1{5EN&9 z-}aG35z*t@kTbx_VN2Wy{SJ02vqL5>4Ci!W)FLa)NS6tkf=6Mp1eGbs5@S`Ti>F;W zed^R&`(ODK0l=9w-Vky5xDu{&c$Jg#&;X}d#7@OO^Ndl zvl;-$^c{gSv$GTapJPs>^Vly6U@v8##jf*>Sy1f;aQ2TqNBXk$nNpmZ_Q~~Wz3Or= z;O9(F^MOC~4(bS9%i|F|^z)qAy@B<{-L*J4Nt&Bc=m_B|+Ogtt*o=PPxVP!62V zQF*YdH7E>|Nuf1Uy3h#%Ahs1D_Q?o2{vFz+e@{0_{@$CWn0)Si8x>fy9hv=nea7~( zNisId;;kZr)^~Kyfm*V@S&gs)(PqB(f|smFEU%WEYY%ZND7a-^dNTTtaNK`cE0oa@ znZPAhYcTXF40hZ3+%?nIRy{C9j%|7GWG9k418^5=6 zb2xKOC=9Vb&t1jXl9asvE)X1yeOC0 zyMB1;P1xRE$*1r>zpOia2)zIFdvN&IarN~<;A7q6c|A%4Ub1K7kYU`SwWVN6&6b2W zi&`nwK5fwc1H^8rS 
z)a3R~Vg<`q{o=CPrjv-nqymzridlq{q$9h}qW&>On@0rzLarJ7Xv!D)?G--+kkUr8 zYMsa5`1Lc=)|M((a@3HU)o-6<$v|&p1 z|4i`tWEIhly*lO9ikoj8zYkaYb9rGDM~n_cg(rf4N1Y462Ei@xQk98%o{mDs`-Q`Jz72M@8Ch4TVlJ7*(J8lJEwN*^OlBO>TSPJD zv~8i-aN5dLi(A~BASSyb{-rc6tJ*1AoIwmn0+S_N4YG)-s%A}T9=b$poeV+7D7i!z zi(85%k%&k-L;WZ(0K`xo8epc>-vK8Dk+-F(UcvGCHQP)VG?VmR%anj#%LsSF2bZ@C zBtFokMubqweAd@SQRjv`!9GvP&8y&w_g$Q`iR5;3Y?^gF-bn1O?M2*mUP6A29G_ZE zU~$v>`sGA@Xn`RKvN#xj2Wi5;-jQNo0UPw;-_ z!t+flc$}r{CNp_}zvZwm60>Gfa=}l&gdmwNWup!qkqglct631y^Ly>-IA{nd+JV2Y zZ=>yD|1uZ?pj7r_Am^j(eIBZBs#n;#eLt4>Qk}6t@!e}eLH&e%MvW5rkzSnXt^(cy zxL77aM}cq)Y)J-W-zuK1GwukI`X7{hPGuN4Ev+`mvT5hH_cS`!#&rUUGha9&7SyL^ zl9#C9=AJ7Ehu3u1C=vaz&_9Owkc}ZI%x?{jTAk952oHwc<=yrDX3@X&epv8!}Wb)o=SAr%XfJk~doL@THfUTOdU5=yi`5mfcU z8@F)K#-+wg5Z3*q$VKTkZVC+tn6&v=eq zZ+v1~&eL`Pbd16p@mU>)10k>L{7QdW?-X~4fs@_d{#G8H#MSSm3H2E?H$8f3nOCkU zp#?+(VFcoDweUL%xPsBFgQL)7+lwisI;$MR&4^fi>@KU22^D8L5)WxM5IN#nAcR>C zgbNQsqXDJ1PB_kex@g|*Iaqxlf(bgIW*n-UXGY0@10FUEHT*~^8|Ot$emXp)f%T{L zi^tw8=*;PT+pfQAeq5kqKJ@tBt8!O|{L-j4@Dj)niVWwb{oQY;G^mpF$wrq+e!=dT z+mSw+)``m|7k_E#5D2smt-!4izmdzVSKDq_93Tmf*Cv-bXCmv~j_SwT;R;2Mp>mDJ z`IK{$0Ri9$s=;A{Q6A*42`xgs5M1^3JR%O)(EciVdN3%Jb!?Mn>1&UI`mxI(ASbyK zfLR+<662dM*MWnC#|aQ>xa}{Hj4=?jcBKaj7i02}A|(`s1QveJ&^c~j^W_x}MP#B? 
z)_q~FOCvVA8GZ+^3~(w8IK5^8Vx6}?%QE4Gi*}Z74|-#n&W62?v<9sdnIP`kUe$^w zSZ`Ysc;ew8N@y@4cboxuuH!u0?6cJn7;VVF#=o?|Tc_yedJrZoocBJTX77D;@2y&g zy_TF0Exz+R>~UW=yw_Q&fc6nbA3#*f7?@XQx3s6mSCW12+(iecw+)cAtjgD*FHsm7 zd0!lkfg=9~e8{F-%5`C5R1YtjNk`LI&Z^75iY^EG3K`_{SyGA&RdfLm>uz;;)$m-% zSb;@S21CVW+dY*>*wBV{L!C5_-zVqNN!sDxApoK*B7q5d_B>F0?yf9-9?*U-+RISB z1aKN10S4`4_VEnpr#V~fCM@JQ`T_#IMNi!=7mIW?i#o!yynWqoP#qvEdff*?e?IO!-2QBxzs+BT z6MP)Xh^8g2JN^USAe(g6pZ-;TUztwJ5{lPK9W{f=IYW$L+nbU8!Eae9K0RE%X5#4( zBUrsE#e>$&*?3)XtHDm7Z{B(^2~}!YU(LF*BeEWNF^r8RoyYIAv>Fk}-fl;k*3~-< zzA=dXCg$Rh4Yck6&oWNM|Il;x{uaH)NOX16dzecp_Hbd-8-} zo`%CjcD+bXpj~!wNZKQ2wGZz!fEX%C;Y+8X9se&$XE2grTeGi>_3f-|f$K*PzCOJd zr@IH;y?mEPVs}tzZ|m>s<9MIrx93NJCqQ+%hJgN%or@2>#yFor>M?j(uA$cdH z+R@vrN+m8DF4U?^-8(S#;;j;LM0Lrs2>*Ry;hYkgMX>RS8O?2)R)yOviX*6bd@_Ru z?%?F0Eitd^0?I<#)BgmI=}6J6jd|7$S+n!;cJ`;3cU`T3bo?M~9RB{WeHw?k^s2CG zPKTpm6PTW)e-H=YC6dDO9!d7&1~E{NtNwI6$&&U)+|#hdU;~(phP47h@GLPjXqlNS zg+df8r{efg0-HTET7Y#-E#vl z++FM_{|$p2T;0sWNqz!B)jYot@5Wy(gzW^jy2m~V@AaZqc`!6ZacV>hs!1EPkuo{>@ z2Vm}5{-3me{3ckM&p#>ue!;E*CBQnd0fsObs$i&r;VlfX9B?qo;K*QkRAFF;ffoi@ z7!+XOhXIxkBn5*63~yj~mcN5R4+fZ0fM6?eko$l97?|$_1580dZvXM0!_hE!!e9x5 z8Vs*sV1j`T1}<17@Lb)@|FN5}@N-*WuMvRM+%4QpVZOY*<+J~fqy9g;8>S-v)Aj#J z2S6k?cCN6Jfi555?He^o zSTGd!hY!!CJ=X%D_-}xVhxdPyo^uFF@bLVP;Qwa;RxDSAg(>WHQ-JCJ ztP3|MmjD|l9~&2+CYOK^kAM+|%c5GN-rBVC1?<1>Ba|K|nDrXKJ4Ua<@Cuyb*8 F{x65=vylJ* literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Flutter/AppFrameworkInfo.plist b/speech_to_text/example/ios/Flutter/AppFrameworkInfo.plist new file mode 100644 index 00000000..6b4c0f78 --- /dev/null +++ b/speech_to_text/example/ios/Flutter/AppFrameworkInfo.plist @@ -0,0 +1,26 @@ + + + + + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + App + CFBundleIdentifier + io.flutter.flutter.app + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + App + CFBundlePackageType + FMWK + CFBundleShortVersionString + 1.0 + CFBundleSignature + ???? 
+ CFBundleVersion + 1.0 + MinimumOSVersion + 8.0 + + diff --git a/speech_to_text/example/ios/Flutter/Debug.xcconfig b/speech_to_text/example/ios/Flutter/Debug.xcconfig new file mode 100644 index 00000000..e8efba11 --- /dev/null +++ b/speech_to_text/example/ios/Flutter/Debug.xcconfig @@ -0,0 +1,2 @@ +#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig" +#include "Generated.xcconfig" diff --git a/speech_to_text/example/ios/Flutter/Flutter.podspec b/speech_to_text/example/ios/Flutter/Flutter.podspec new file mode 100644 index 00000000..5ca30416 --- /dev/null +++ b/speech_to_text/example/ios/Flutter/Flutter.podspec @@ -0,0 +1,18 @@ +# +# NOTE: This podspec is NOT to be published. It is only used as a local source! +# + +Pod::Spec.new do |s| + s.name = 'Flutter' + s.version = '1.0.0' + s.summary = 'High-performance, high-fidelity mobile apps.' + s.description = <<-DESC +Flutter provides an easy and productive way to build and deploy high-performance mobile apps for Android and iOS. 
+ DESC + s.homepage = 'https://flutter.io' + s.license = { :type => 'MIT' } + s.author = { 'Flutter Dev Team' => 'flutter-dev@googlegroups.com' } + s.source = { :git => 'https://github.com/flutter/engine', :tag => s.version.to_s } + s.ios.deployment_target = '8.0' + s.vendored_frameworks = 'Flutter.framework' +end diff --git a/speech_to_text/example/ios/Flutter/Release.xcconfig b/speech_to_text/example/ios/Flutter/Release.xcconfig new file mode 100644 index 00000000..399e9340 --- /dev/null +++ b/speech_to_text/example/ios/Flutter/Release.xcconfig @@ -0,0 +1,2 @@ +#include "Pods/Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig" +#include "Generated.xcconfig" diff --git a/speech_to_text/example/ios/Podfile b/speech_to_text/example/ios/Podfile new file mode 100644 index 00000000..ab7d5b46 --- /dev/null +++ b/speech_to_text/example/ios/Podfile @@ -0,0 +1,90 @@ +# Uncomment this line to define a global platform for your project +platform :ios, '10.0' + +# CocoaPods analytics sends network stats synchronously affecting flutter build latency. +ENV['COCOAPODS_DISABLE_STATS'] = 'true' + +project 'Runner', { + 'Debug' => :debug, + 'Profile' => :release, + 'Release' => :release, +} + +def parse_KV_file(file, separator='=') + file_abs_path = File.expand_path(file) + if !File.exists? file_abs_path + return []; + end + generated_key_values = {} + skip_line_start_symbols = ["#", "/"] + File.foreach(file_abs_path) do |line| + next if skip_line_start_symbols.any? { |symbol| line =~ /^\s*#{symbol}/ } + plugin = line.split(pattern=separator) + if plugin.length == 2 + podname = plugin[0].strip() + path = plugin[1].strip() + podpath = File.expand_path("#{path}", file_abs_path) + generated_key_values[podname] = podpath + else + puts "Invalid plugin specification: #{line}" + end + end + generated_key_values +end + +target 'Runner' do + use_frameworks! + use_modular_headers! 
+ + # Flutter Pod + + copied_flutter_dir = File.join(__dir__, 'Flutter') + copied_framework_path = File.join(copied_flutter_dir, 'Flutter.framework') + copied_podspec_path = File.join(copied_flutter_dir, 'Flutter.podspec') + unless File.exist?(copied_framework_path) && File.exist?(copied_podspec_path) + # Copy Flutter.framework and Flutter.podspec to Flutter/ to have something to link against if the xcode backend script has not run yet. + # That script will copy the correct debug/profile/release version of the framework based on the currently selected Xcode configuration. + # CocoaPods will not embed the framework on pod install (before any build phases can generate) if the dylib does not exist. + + generated_xcode_build_settings_path = File.join(copied_flutter_dir, 'Generated.xcconfig') + unless File.exist?(generated_xcode_build_settings_path) + raise "Generated.xcconfig must exist. If you're running pod install manually, make sure flutter pub get is executed first" + end + generated_xcode_build_settings = parse_KV_file(generated_xcode_build_settings_path) + cached_framework_dir = generated_xcode_build_settings['FLUTTER_FRAMEWORK_DIR']; + + unless File.exist?(copied_framework_path) + FileUtils.cp_r(File.join(cached_framework_dir, 'Flutter.framework'), copied_flutter_dir) + end + unless File.exist?(copied_podspec_path) + FileUtils.cp(File.join(cached_framework_dir, 'Flutter.podspec'), copied_flutter_dir) + end + end + + # Keep pod path relative so it can be checked into Podfile.lock. + pod 'Flutter', :path => 'Flutter' + + # Plugin Pods + + # Prepare symlinks folder. We use symlinks to avoid having Podfile.lock + # referring to absolute paths on developers' machines. 
+ system('rm -rf .symlinks') + system('mkdir -p .symlinks/plugins') + plugin_pods = parse_KV_file('../.flutter-plugins') + plugin_pods.each do |name, path| + symlink = File.join('.symlinks', 'plugins', name) + File.symlink(path, symlink) + pod name, :path => File.join(symlink, 'ios') + end +end + +# Prevent Cocoapods from embedding a second Flutter framework and causing an error with the new Xcode build system. +install! 'cocoapods', :disable_input_output_paths => true + +post_install do |installer| + installer.pods_project.targets.each do |target| + target.build_configurations.each do |config| + config.build_settings['ENABLE_BITCODE'] = 'NO' + end + end +end diff --git a/speech_to_text/example/ios/Podfile.lock b/speech_to_text/example/ios/Podfile.lock new file mode 100644 index 00000000..60c8b57d --- /dev/null +++ b/speech_to_text/example/ios/Podfile.lock @@ -0,0 +1,29 @@ +PODS: + - Flutter (1.0.0) + - speech_to_text (0.0.1): + - Flutter + - Try + - Try (2.1.1) + +DEPENDENCIES: + - Flutter (from `Flutter`) + - speech_to_text (from `.symlinks/plugins/speech_to_text/ios`) + +SPEC REPOS: + trunk: + - Try + +EXTERNAL SOURCES: + Flutter: + :path: Flutter + speech_to_text: + :path: ".symlinks/plugins/speech_to_text/ios" + +SPEC CHECKSUMS: + Flutter: 0e3d915762c693b495b44d77113d4970485de6ec + speech_to_text: b43a7d99aef037bd758ed8e45d79bbac035d2dfe + Try: 5ef669ae832617b3cee58cb2c6f99fb767a4ff96 + +PODFILE CHECKSUM: 0ba44ad07df4ab62269dc769727cf0f12b1e453d + +COCOAPODS: 1.9.3 diff --git a/speech_to_text/example/ios/Runner.xcodeproj/project.pbxproj b/speech_to_text/example/ios/Runner.xcodeproj/project.pbxproj new file mode 100644 index 00000000..c40af650 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcodeproj/project.pbxproj @@ -0,0 +1,578 @@ +// !$*UTF8*$! 
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 46; + objects = { + +/* Begin PBXBuildFile section */ + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */ = {isa = PBXBuildFile; fileRef = 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */; }; + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; + 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74858FAE1ED2DC5600515810 /* AppDelegate.swift */; }; + 9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */ = {isa = PBXBuildFile; fileRef = 9740EEB21CF90195004384FC /* Debug.xcconfig */; }; + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FD1CF9000F007C117D /* Assets.xcassets */; }; + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */; }; + C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */; }; +/* End PBXBuildFile section */ + +/* Begin PBXCopyFilesBuildPhase section */ + 9705A1C41CF9048500538489 /* Embed Frameworks */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 10; + files = ( + ); + name = "Embed Frameworks"; + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXCopyFilesBuildPhase section */ + +/* Begin PBXFileReference section */ + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GeneratedPluginRegistrant.h; sourceTree = ""; }; + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m 
*/ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = GeneratedPluginRegistrant.m; sourceTree = ""; }; + 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; + 59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; + 6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.release.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.release.xcconfig"; sourceTree = ""; }; + 74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Runner-Bridging-Header.h"; sourceTree = ""; }; + 74858FAE1ED2DC5600515810 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; + 9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = ""; }; + 9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = ""; }; + 97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; 
includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 97C147021CF9000F007C117D /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.profile.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.profile.xcconfig"; sourceTree = ""; }; + E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 97C146EB1CF9000F007C117D /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + C446300A034BF27D9F1ACEF9 /* Pods_Runner.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 7937AF765430D66F28F7FEEF /* Frameworks */ = { + isa = PBXGroup; + children = ( + E76E9615C6B4FABD88067D55 /* Pods_Runner.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; + 9740EEB11CF90186004384FC /* Flutter */ = { + isa = PBXGroup; + children = ( + 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */, + 9740EEB21CF90195004384FC /* Debug.xcconfig */, + 
7AFA3C8E1D35360C0083082E /* Release.xcconfig */, + 9740EEB31CF90195004384FC /* Generated.xcconfig */, + ); + name = Flutter; + sourceTree = ""; + }; + 97C146E51CF9000F007C117D = { + isa = PBXGroup; + children = ( + 9740EEB11CF90186004384FC /* Flutter */, + 97C146F01CF9000F007C117D /* Runner */, + 97C146EF1CF9000F007C117D /* Products */, + A68CCF1640763A551D35BD31 /* Pods */, + 7937AF765430D66F28F7FEEF /* Frameworks */, + ); + sourceTree = ""; + }; + 97C146EF1CF9000F007C117D /* Products */ = { + isa = PBXGroup; + children = ( + 97C146EE1CF9000F007C117D /* Runner.app */, + ); + name = Products; + sourceTree = ""; + }; + 97C146F01CF9000F007C117D /* Runner */ = { + isa = PBXGroup; + children = ( + 97C146FA1CF9000F007C117D /* Main.storyboard */, + 97C146FD1CF9000F007C117D /* Assets.xcassets */, + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */, + 97C147021CF9000F007C117D /* Info.plist */, + 97C146F11CF9000F007C117D /* Supporting Files */, + 1498D2321E8E86230040F4C2 /* GeneratedPluginRegistrant.h */, + 1498D2331E8E89220040F4C2 /* GeneratedPluginRegistrant.m */, + 74858FAE1ED2DC5600515810 /* AppDelegate.swift */, + 74858FAD1ED2DC5600515810 /* Runner-Bridging-Header.h */, + ); + path = Runner; + sourceTree = ""; + }; + 97C146F11CF9000F007C117D /* Supporting Files */ = { + isa = PBXGroup; + children = ( + ); + name = "Supporting Files"; + sourceTree = ""; + }; + A68CCF1640763A551D35BD31 /* Pods */ = { + isa = PBXGroup; + children = ( + 59AFE6BB0B596A0E0811BDFF /* Pods-Runner.debug.xcconfig */, + 6280E2A777726D2043BF80B7 /* Pods-Runner.release.xcconfig */, + C3909A4B7EC98A20255210E3 /* Pods-Runner.profile.xcconfig */, + ); + path = Pods; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 97C146ED1CF9000F007C117D /* Runner */ = { + isa = PBXNativeTarget; + buildConfigurationList = 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */; + buildPhases = ( + 949FCB95217187F2C022D6A9 /* [CP] Check Pods 
Manifest.lock */, + 9740EEB61CF901F6004384FC /* Run Script */, + 97C146EA1CF9000F007C117D /* Sources */, + 97C146EB1CF9000F007C117D /* Frameworks */, + 97C146EC1CF9000F007C117D /* Resources */, + 9705A1C41CF9048500538489 /* Embed Frameworks */, + 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = Runner; + productName = Runner; + productReference = 97C146EE1CF9000F007C117D /* Runner.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 97C146E61CF9000F007C117D /* Project object */ = { + isa = PBXProject; + attributes = { + LastUpgradeCheck = 1020; + ORGANIZATIONNAME = "The Chromium Authors"; + TargetAttributes = { + 97C146ED1CF9000F007C117D = { + CreatedOnToolsVersion = 7.3.1; + DevelopmentTeam = 3X949YE9K2; + LastSwiftMigration = 0910; + }; + }; + }; + buildConfigurationList = 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */; + compatibilityVersion = "Xcode 3.2"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 97C146E51CF9000F007C117D; + productRefGroup = 97C146EF1CF9000F007C117D /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 97C146ED1CF9000F007C117D /* Runner */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 97C146EC1CF9000F007C117D /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 97C147011CF9000F007C117D /* LaunchScreen.storyboard in Resources */, + 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */, + 9740EEB41CF90195004384FC /* Debug.xcconfig in Resources */, + 97C146FE1CF9000F007C117D /* Assets.xcassets in Resources */, + 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End 
PBXResourcesBuildPhase section */ + +/* Begin PBXShellScriptBuildPhase section */ + 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "Thin Binary"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; + }; + 8B0988F04B6AE44AA0304FEF /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "[CP] Embed Pods Frameworks"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; + 949FCB95217187F2C022D6A9 /* [CP] Check Pods Manifest.lock */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + ); + inputPaths = ( + "${PODS_PODFILE_DIR_PATH}/Podfile.lock", + "${PODS_ROOT}/Manifest.lock", + ); + name = "[CP] Check Pods Manifest.lock"; + outputFileListPaths = ( + ); + outputPaths = ( + "$(DERIVED_FILE_DIR)/Pods-Runner-checkManifestLockResult.txt", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; + showEnvVarsInLog = 0; + }; + 9740EEB61CF901F6004384FC /* Run Script */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputPaths = ( + ); + name = "Run Script"; + outputPaths = ( + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; + }; +/* End PBXShellScriptBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 97C146EA1CF9000F007C117D /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 74858FAF1ED2DC5600515810 /* AppDelegate.swift in Sources */, + 1498D2341E8E89220040F4C2 /* GeneratedPluginRegistrant.m in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXVariantGroup section */ + 97C146FA1CF9000F007C117D /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 97C146FB1CF9000F007C117D /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + 97C146FF1CF9000F007C117D /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 97C147001CF9000F007C117D /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 249021D3217E4FDB00AE95B9 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = 
YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Profile; + }; + 249021D4217E4FDB00AE95B9 /* Profile */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 3X949YE9K2; + ENABLE_BITCODE = NO; + FRAMEWORK_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + INFOPLIST_FILE = Runner/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + 
"$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; + SWIFT_VERSION = 5.0; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Profile; + }; + 97C147031CF9000F007C117D /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + 
GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.0; + MTL_ENABLE_DEBUG_INFO = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 97C147041CF9000F007C117D /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 8.0; + MTL_ENABLE_DEBUG_INFO = NO; + SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL 
= "-Owholemodule"; + TARGETED_DEVICE_FAMILY = "1,2"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 97C147061CF9000F007C117D /* Debug */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 9740EEB21CF90195004384FC /* Debug.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 3X949YE9K2; + ENABLE_BITCODE = NO; + FRAMEWORK_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + INFOPLIST_FILE = Runner/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 5.0; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Debug; + }; + 97C147071CF9000F007C117D /* Release */ = { + isa = XCBuildConfiguration; + baseConfigurationReference = 7AFA3C8E1D35360C0083082E /* Release.xcconfig */; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CLANG_ENABLE_MODULES = YES; + CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = 3X949YE9K2; + ENABLE_BITCODE = NO; + FRAMEWORK_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + INFOPLIST_FILE = Runner/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 10.0; + LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LIBRARY_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/Flutter", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.csdcorp.speechToTextExample; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; + SWIFT_VERSION = 5.0; + VERSIONING_SYSTEM = "apple-generic"; + }; + name = Release; + }; +/* End 
XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 97C146E91CF9000F007C117D /* Build configuration list for PBXProject "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 97C147031CF9000F007C117D /* Debug */, + 97C147041CF9000F007C117D /* Release */, + 249021D3217E4FDB00AE95B9 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 97C147051CF9000F007C117D /* Build configuration list for PBXNativeTarget "Runner" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 97C147061CF9000F007C117D /* Debug */, + 97C147071CF9000F007C117D /* Release */, + 249021D4217E4FDB00AE95B9 /* Profile */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 97C146E61CF9000F007C117D /* Project object */; +} diff --git a/speech_to_text/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/speech_to_text/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 00000000..1d526a16 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/speech_to_text/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/speech_to_text/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme new file mode 100644 index 00000000..a28140cf --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -0,0 +1,91 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/ios/Runner.xcworkspace/contents.xcworkspacedata b/speech_to_text/example/ios/Runner.xcworkspace/contents.xcworkspacedata new file mode 100644 index 00000000..21a3cc14 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,10 @@ 
+ + + + + + + diff --git a/speech_to_text/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/speech_to_text/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 00000000..18d98100 --- /dev/null +++ b/speech_to_text/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/speech_to_text/example/ios/Runner/AppDelegate.swift b/speech_to_text/example/ios/Runner/AppDelegate.swift new file mode 100644 index 00000000..70693e4a --- /dev/null +++ b/speech_to_text/example/ios/Runner/AppDelegate.swift @@ -0,0 +1,13 @@ +import UIKit +import Flutter + +@UIApplicationMain +@objc class AppDelegate: FlutterAppDelegate { + override func application( + _ application: UIApplication, + didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]? + ) -> Bool { + GeneratedPluginRegistrant.register(with: self) + return super.application(application, didFinishLaunchingWithOptions: launchOptions) + } +} diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 00000000..d36b1fab --- /dev/null +++ b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,122 @@ +{ + "images" : [ + { + "size" : "20x20", + "idiom" : "iphone", + "filename" : "Icon-App-20x20@2x.png", + "scale" : "2x" + }, + { + "size" : "20x20", + "idiom" : "iphone", + "filename" : "Icon-App-20x20@3x.png", + "scale" : "3x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@1x.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "iphone", + "filename" : "Icon-App-29x29@3x.png", + "scale" : "3x" + }, + { + "size" : "40x40", + 
"idiom" : "iphone", + "filename" : "Icon-App-40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "iphone", + "filename" : "Icon-App-40x40@3x.png", + "scale" : "3x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "Icon-App-60x60@2x.png", + "scale" : "2x" + }, + { + "size" : "60x60", + "idiom" : "iphone", + "filename" : "Icon-App-60x60@3x.png", + "scale" : "3x" + }, + { + "size" : "20x20", + "idiom" : "ipad", + "filename" : "Icon-App-20x20@1x.png", + "scale" : "1x" + }, + { + "size" : "20x20", + "idiom" : "ipad", + "filename" : "Icon-App-20x20@2x.png", + "scale" : "2x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "Icon-App-29x29@1x.png", + "scale" : "1x" + }, + { + "size" : "29x29", + "idiom" : "ipad", + "filename" : "Icon-App-29x29@2x.png", + "scale" : "2x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "Icon-App-40x40@1x.png", + "scale" : "1x" + }, + { + "size" : "40x40", + "idiom" : "ipad", + "filename" : "Icon-App-40x40@2x.png", + "scale" : "2x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "Icon-App-76x76@1x.png", + "scale" : "1x" + }, + { + "size" : "76x76", + "idiom" : "ipad", + "filename" : "Icon-App-76x76@2x.png", + "scale" : "2x" + }, + { + "size" : "83.5x83.5", + "idiom" : "ipad", + "filename" : "Icon-App-83.5x83.5@2x.png", + "scale" : "2x" + }, + { + "size" : "1024x1024", + "idiom" : "ios-marketing", + "filename" : "Icon-App-1024x1024@1x.png", + "scale" : "1x" + } + ], + "info" : { + "version" : 1, + "author" : "xcode" + } +} diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-1024x1024@1x.png new file mode 100644 index 0000000000000000000000000000000000000000..dc9ada4725e9b0ddb1deab583e5b5102493aa332 GIT binary patch literal 10932 zcmeHN2~<R zh`|8`A_PQ1nSu(UMFx?8j8PC!!VDphaL#`F42fd#7Vlc`zIE4n%Y~eiz4y1j|NDpi 
z?<@|pSJ-HM`qifhf@m%MamgwK83`XpBA<+azdF#2QsT{X@z0A9Bq>~TVErigKH1~P zRX-!h-f0NJ4Mh++{D}J+K>~~rq}d%o%+4dogzXp7RxX4C>Km5XEI|PAFDmo;DFm6G zzjVoB`@qW98Yl0Kvc-9w09^PrsobmG*Eju^=3f?0o-t$U)TL1B3;sZ^!++3&bGZ!o-*6w?;oOhf z=A+Qb$scV5!RbG+&2S}BQ6YH!FKb0``VVX~T$dzzeSZ$&9=X$3)_7Z{SspSYJ!lGE z7yig_41zpQ)%5dr4ff0rh$@ky3-JLRk&DK)NEIHecf9c*?Z1bUB4%pZjQ7hD!A0r-@NF(^WKdr(LXj|=UE7?gBYGgGQV zidf2`ZT@pzXf7}!NH4q(0IMcxsUGDih(0{kRSez&z?CFA0RVXsVFw3^u=^KMtt95q z43q$b*6#uQDLoiCAF_{RFc{!H^moH_cmll#Fc^KXi{9GDl{>%+3qyfOE5;Zq|6#Hb zp^#1G+z^AXfRKaa9HK;%b3Ux~U@q?xg<2DXP%6k!3E)PA<#4$ui8eDy5|9hA5&{?v z(-;*1%(1~-NTQ`Is1_MGdQ{+i*ccd96ab$R$T3=% zw_KuNF@vI!A>>Y_2pl9L{9h1-C6H8<)J4gKI6{WzGBi<@u3P6hNsXG=bRq5c+z;Gc3VUCe;LIIFDmQAGy+=mRyF++u=drBWV8-^>0yE9N&*05XHZpPlE zxu@?8(ZNy7rm?|<+UNe0Vs6&o?l`Pt>P&WaL~M&#Eh%`rg@Mbb)J&@DA-wheQ>hRV z<(XhigZAT z>=M;URcdCaiO3d^?H<^EiEMDV+7HsTiOhoaMX%P65E<(5xMPJKxf!0u>U~uVqnPN7T!X!o@_gs3Ct1 zlZ_$5QXP4{Aj645wG_SNT&6m|O6~Tsl$q?nK*)(`{J4b=(yb^nOATtF1_aS978$x3 zx>Q@s4i3~IT*+l{@dx~Hst21fR*+5}S1@cf>&8*uLw-0^zK(+OpW?cS-YG1QBZ5q! zgTAgivzoF#`cSz&HL>Ti!!v#?36I1*l^mkrx7Y|K6L#n!-~5=d3;K<;Zqi|gpNUn_ z_^GaQDEQ*jfzh;`j&KXb66fWEk1K7vxQIMQ_#Wu_%3 z4Oeb7FJ`8I>Px;^S?)}2+4D_83gHEq>8qSQY0PVP?o)zAv3K~;R$fnwTmI-=ZLK`= zTm+0h*e+Yfr(IlH3i7gUclNH^!MU>id$Jw>O?2i0Cila#v|twub21@e{S2v}8Z13( zNDrTXZVgris|qYm<0NU(tAPouG!QF4ZNpZPkX~{tVf8xY690JqY1NVdiTtW+NqyRP zZ&;T0ikb8V{wxmFhlLTQ&?OP7 z;(z*<+?J2~z*6asSe7h`$8~Se(@t(#%?BGLVs$p``;CyvcT?7Y!{tIPva$LxCQ&4W z6v#F*);|RXvI%qnoOY&i4S*EL&h%hP3O zLsrFZhv&Hu5tF$Lx!8(hs&?!Kx5&L(fdu}UI5d*wn~A`nPUhG&Rv z2#ixiJdhSF-K2tpVL=)5UkXRuPAFrEW}7mW=uAmtVQ&pGE-&az6@#-(Te^n*lrH^m@X-ftVcwO_#7{WI)5v(?>uC9GG{lcGXYJ~Q8q zbMFl7;t+kV;|;KkBW2!P_o%Czhw&Q(nXlxK9ak&6r5t_KH8#1Mr-*0}2h8R9XNkr zto5-b7P_auqTJb(TJlmJ9xreA=6d=d)CVbYP-r4$hDn5|TIhB>SReMfh&OVLkMk-T zYf%$taLF0OqYF?V{+6Xkn>iX@TuqQ?&cN6UjC9YF&%q{Ut3zv{U2)~$>-3;Dp)*(? 
zg*$mu8^i=-e#acaj*T$pNowo{xiGEk$%DusaQiS!KjJH96XZ-hXv+jk%ard#fu=@Q z$AM)YWvE^{%tDfK%nD49=PI|wYu}lYVbB#a7wtN^Nml@CE@{Gv7+jo{_V?I*jkdLD zJE|jfdrmVbkfS>rN*+`#l%ZUi5_bMS<>=MBDNlpiSb_tAF|Zy`K7kcp@|d?yaTmB^ zo?(vg;B$vxS|SszusORgDg-*Uitzdi{dUV+glA~R8V(?`3GZIl^egW{a919!j#>f` znL1o_^-b`}xnU0+~KIFLQ)$Q6#ym%)(GYC`^XM*{g zv3AM5$+TtDRs%`2TyR^$(hqE7Y1b&`Jd6dS6B#hDVbJlUXcG3y*439D8MrK!2D~6gn>UD4Imctb z+IvAt0iaW73Iq$K?4}H`7wq6YkTMm`tcktXgK0lKPmh=>h+l}Y+pDtvHnG>uqBA)l zAH6BV4F}v$(o$8Gfo*PB>IuaY1*^*`OTx4|hM8jZ?B6HY;F6p4{`OcZZ(us-RVwDx zUzJrCQlp@mz1ZFiSZ*$yX3c_#h9J;yBE$2g%xjmGF4ca z&yL`nGVs!Zxsh^j6i%$a*I3ZD2SoNT`{D%mU=LKaEwbN(_J5%i-6Va?@*>=3(dQy` zOv%$_9lcy9+(t>qohkuU4r_P=R^6ME+wFu&LA9tw9RA?azGhjrVJKy&8=*qZT5Dr8g--d+S8zAyJ$1HlW3Olryt`yE zFIph~Z6oF&o64rw{>lgZISC6p^CBer9C5G6yq%?8tC+)7*d+ib^?fU!JRFxynRLEZ zj;?PwtS}Ao#9whV@KEmwQgM0TVP{hs>dg(1*DiMUOKHdQGIqa0`yZnHk9mtbPfoLx zo;^V6pKUJ!5#n`w2D&381#5#_t}AlTGEgDz$^;u;-vxDN?^#5!zN9ngytY@oTv!nc zp1Xn8uR$1Z;7vY`-<*?DfPHB;x|GUi_fI9@I9SVRv1)qETbNU_8{5U|(>Du84qP#7 z*l9Y$SgA&wGbj>R1YeT9vYjZuC@|{rajTL0f%N@>3$DFU=`lSPl=Iv;EjuGjBa$Gw zHD-;%YOE@<-!7-Mn`0WuO3oWuL6tB2cpPw~Nvuj|KM@))ixuDK`9;jGMe2d)7gHin zS<>k@!x;!TJEc#HdL#RF(`|4W+H88d4V%zlh(7#{q2d0OQX9*FW^`^_<3r$kabWAB z$9BONo5}*(%kx zOXi-yM_cmB3>inPpI~)duvZykJ@^^aWzQ=eQ&STUa}2uT@lV&WoRzkUoE`rR0)`=l zFT%f|LA9fCw>`enm$p7W^E@U7RNBtsh{_-7vVz3DtB*y#*~(L9+x9*wn8VjWw|Q~q zKFsj1Yl>;}%MG3=PY`$g$_mnyhuV&~O~u~)968$0b2!Jkd;2MtAP#ZDYw9hmK_+M$ zb3pxyYC&|CuAbtiG8HZjj?MZJBFbt`ryf+c1dXFuC z0*ZQhBzNBd*}s6K_G}(|Z_9NDV162#y%WSNe|FTDDhx)K!c(mMJh@h87@8(^YdK$&d*^WQe8Z53 z(|@MRJ$Lk-&ii74MPIs80WsOFZ(NX23oR-?As+*aq6b?~62@fSVmM-_*cb1RzZ)`5$agEiL`-E9s7{GM2?(KNPgK1(+c*|-FKoy}X(D_b#etO|YR z(BGZ)0Ntfv-7R4GHoXp?l5g#*={S1{u-QzxCGng*oWr~@X-5f~RA14b8~B+pLKvr4 zfgL|7I>jlak9>D4=(i(cqYf7#318!OSR=^`xxvI!bBlS??`xxWeg?+|>MxaIdH1U~#1tHu zB{QMR?EGRmQ_l4p6YXJ{o(hh-7Tdm>TAX380TZZZyVkqHNzjUn*_|cb?T? 
zt;d2s-?B#Mc>T-gvBmQZx(y_cfkXZO~{N zT6rP7SD6g~n9QJ)8F*8uHxTLCAZ{l1Y&?6v)BOJZ)=R-pY=Y=&1}jE7fQ>USS}xP#exo57uND0i*rEk@$;nLvRB@u~s^dwRf?G?_enN@$t* zbL%JO=rV(3Ju8#GqUpeE3l_Wu1lN9Y{D4uaUe`g>zlj$1ER$6S6@{m1!~V|bYkhZA z%CvrDRTkHuajMU8;&RZ&itnC~iYLW4DVkP<$}>#&(`UO>!n)Po;Mt(SY8Yb`AS9lt znbX^i?Oe9r_o=?})IHKHoQGKXsps_SE{hwrg?6dMI|^+$CeC&z@*LuF+P`7LfZ*yr+KN8B4{Nzv<`A(wyR@!|gw{zB6Ha ziwPAYh)oJ(nlqSknu(8g9N&1hu0$vFK$W#mp%>X~AU1ay+EKWcFdif{% z#4!4aoVVJ;ULmkQf!ke2}3hqxLK>eq|-d7Ly7-J9zMpT`?dxo6HdfJA|t)?qPEVBDv z{y_b?4^|YA4%WW0VZd8C(ZgQzRI5(I^)=Ub`Y#MHc@nv0w-DaJAqsbEHDWG8Ia6ju zo-iyr*sq((gEwCC&^TYBWt4_@|81?=B-?#P6NMff(*^re zYqvDuO`K@`mjm_Jd;mW_tP`3$cS?R$jR1ZN09$YO%_iBqh5ftzSpMQQtxKFU=FYmP zeY^jph+g<4>YO;U^O>-NFLn~-RqlHvnZl2yd2A{Yc1G@Ga$d+Q&(f^tnPf+Z7serIU};17+2DU_f4Z z@GaPFut27d?!YiD+QP@)T=77cR9~MK@bd~pY%X(h%L={{OIb8IQmf-!xmZkm8A0Ga zQSWONI17_ru5wpHg3jI@i9D+_Y|pCqVuHJNdHUauTD=R$JcD2K_liQisqG$(sm=k9;L* z!L?*4B~ql7uioSX$zWJ?;q-SWXRFhz2Jt4%fOHA=Bwf|RzhwqdXGr78y$J)LR7&3T zE1WWz*>GPWKZ0%|@%6=fyx)5rzUpI;bCj>3RKzNG_1w$fIFCZ&UR0(7S?g}`&Pg$M zf`SLsz8wK82Vyj7;RyKmY{a8G{2BHG%w!^T|Njr!h9TO2LaP^_f22Q1=l$QiU84ao zHe_#{S6;qrC6w~7{y(hs-?-j?lbOfgH^E=XcSgnwW*eEz{_Z<_Px$?ny*JR5%f>l)FnDQ543{x%ZCiu33$Wg!pQFfT_}?5Q|_VSlIbLC`dpoMXL}9 zHfd9&47Mo(7D231gb+kjFxZHS4-m~7WurTH&doVX2KI5sU4v(sJ1@T9eCIKPjsqSr z)C01LsCxk=72-vXmX}CQD#BD;Cthymh&~=f$Q8nn0J<}ZrusBy4PvRNE}+1ceuj8u z0mW5k8fmgeLnTbWHGwfKA3@PdZxhn|PypR&^p?weGftrtCbjF#+zk_5BJh7;0`#Wr zgDpM_;Ax{jO##IrT`Oz;MvfwGfV$zD#c2xckpcXC6oou4ML~ezCc2EtnsQTB4tWNg z?4bkf;hG7IMfhgNI(FV5Gs4|*GyMTIY0$B=_*mso9Ityq$m^S>15>-?0(zQ<8Qy<_TjHE33(?_M8oaM zyc;NxzRVK@DL6RJnX%U^xW0Gpg(lXp(!uK1v0YgHjs^ZXSQ|m#lV7ip7{`C_J2TxPmfw%h$|%acrYHt)Re^PB%O&&=~a zhS(%I#+V>J-vjIib^<+s%ludY7y^C(P8nmqn9fp!i+?vr`bziDE=bx`%2W#Xyrj|i z!XQ4v1%L`m{7KT7q+LZNB^h8Ha2e=`Wp65^0;J00)_^G=au=8Yo;1b`CV&@#=jIBo zjN^JNVfYSs)+kDdGe7`1&8!?MQYKS?DuHZf3iogk_%#9E|5S zWeHrmAo>P;ejX7mwq#*}W25m^ZI+{(Z8fI?4jM_fffY0nok=+88^|*_DwcW>mR#e+ 
zX$F_KMdb6sRz!~7KkyN0G(3XQ+;z3X%PZ4gh;n-%62U<*VUKNv(D&Q->Na@Xb&u5Q3`3DGf+a8O5x7c#7+R+EAYl@R5us)CIw z7sT@_y~Ao@uL#&^LIh&QceqiT^+lb0YbFZt_SHOtWA%mgPEKVNvVgCsXy{5+zl*X8 zCJe)Q@y>wH^>l4;h1l^Y*9%-23TSmE>q5nI@?mt%n;Sj4Qq`Z+ib)a*a^cJc%E9^J zB;4s+K@rARbcBLT5P=@r;IVnBMKvT*)ew*R;&8vu%?Z&S>s?8?)3*YawM0P4!q$Kv zMmKh3lgE~&w&v%wVzH3Oe=jeNT=n@Y6J6TdHWTjXfX~-=1A1Bw`EW8rn}MqeI34nh zexFeA?&C3B2(E?0{drE@DA2pu(A#ElY&6el60Rn|Qpn-FkfQ8M93AfWIr)drgDFEU zghdWK)^71EWCP(@(=c4kfH1Y(4iugD4fve6;nSUpLT%!)MUHs1!zJYy4y||C+SwQ! z)KM&$7_tyM`sljP2fz6&Z;jxRn{Wup8IOUx8D4uh&(=O zx-7$a;U><*5L^!%xRlw)vAbh;sdlR||& ze}8_8%)c2Fwy=F&H|LM+p{pZB5DKTx>Y?F1N%BlZkXf!}JeGuMZk~LPi7{cidvUGB zAJ4LVeNV%XO>LTrklB#^-;8nb;}6l;1oW&WS=Mz*Az!4cqqQzbOSFq`$Q%PfD7srM zpKgP-D_0XPTRX*hAqeq0TDkJ;5HB1%$3Np)99#16c{ zJImlNL(npL!W|Gr_kxl1GVmF5&^$^YherS7+~q$p zt}{a=*RiD2Ikv6o=IM1kgc7zqpaZ;OB)P!1zz*i3{U()Dq#jG)egvK}@uFLa`oyWZ zf~=MV)|yJn`M^$N%ul5);JuQvaU1r2wt(}J_Qgyy`qWQI`hEeRX0uC@c1(dQ2}=U$ tNIIaX+dr)NRWXcxoR{>fqI{SF_dm1Ylv~=3YHI)h002ovPDHLkV1g(pWS;;4 literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-20x20@3x.png new file mode 100644 index 0000000000000000000000000000000000000000..f091b6b0bca859a3f474b03065bef75ba58a9e4c GIT binary patch literal 1588 zcmV-42Fv-0P)C1SqPt}wig>|5Crh^=oyX$BK<}M8eLU3e2hGT;=G|!_SP)7zNI6fqUMB=)y zRAZ>eDe#*r`yDAVgB_R*LB*MAc)8(b{g{9McCXW!lq7r(btRoB9!8B-#AI6JMb~YFBEvdsV)`mEQO^&#eRKx@b&x- z5lZm*!WfD8oCLzfHGz#u7sT0^VLMI1MqGxF^v+`4YYnVYgk*=kU?HsSz{v({E3lb9 z>+xILjBN)t6`=g~IBOelGQ(O990@BfXf(DRI5I$qN$0Gkz-FSc$3a+2fX$AedL4u{ z4V+5Ong(9LiGcIKW?_352sR;LtDPmPJXI{YtT=O8=76o9;*n%_m|xo!i>7$IrZ-{l z-x3`7M}qzHsPV@$v#>H-TpjDh2UE$9g6sysUREDy_R(a)>=eHw-WAyfIN z*qb!_hW>G)Tu8nSw9yn#3wFMiLcfc4pY0ek1}8(NqkBR@t4{~oC>ryc-h_ByH(Cg5 z>ao-}771+xE3um9lWAY1FeQFxowa1(!J(;Jg*wrg!=6FdRX+t_<%z&d&?|Bn){>zm zZQj(aA_HeBY&OC^jj*)N`8fa^ePOU72VpInJoI1?`ty#lvlNzs(&MZX+R%2xS~5Kh 
zX*|AU4QE#~SgPzOXe9>tRj>hjU@c1k5Y_mW*Jp3fI;)1&g3j|zDgC+}2Q_v%YfDax z!?umcN^n}KYQ|a$Lr+51Nf9dkkYFSjZZjkma$0KOj+;aQ&721~t7QUKx61J3(P4P1 zstI~7-wOACnWP4=8oGOwz%vNDqD8w&Q`qcNGGrbbf&0s9L0De{4{mRS?o0MU+nR_! zrvshUau0G^DeMhM_v{5BuLjb#Hh@r23lDAk8oF(C+P0rsBpv85EP>4CVMx#04MOfG z;P%vktHcXwTj~+IE(~px)3*MY77e}p#|c>TD?sMatC0Tu4iKKJ0(X8jxQY*gYtxsC z(zYC$g|@+I+kY;dg_dE>scBf&bP1Nc@Hz<3R)V`=AGkc;8CXqdi=B4l2k|g;2%#m& z*jfX^%b!A8#bI!j9-0Fi0bOXl(-c^AB9|nQaE`*)Hw+o&jS9@7&Gov#HbD~#d{twV zXd^Tr^mWLfFh$@Dr$e;PBEz4(-2q1FF0}c;~B5sA}+Q>TOoP+t>wf)V9Iy=5ruQa;z)y zI9C9*oUga6=hxw6QasLPnee@3^Rr*M{CdaL5=R41nLs(AHk_=Y+A9$2&H(B7!_pURs&8aNw7?`&Z&xY_Ye z)~D5Bog^td-^QbUtkTirdyK^mTHAOuptDflut!#^lnKqU md>ggs(5nOWAqO?umG&QVYK#ibz}*4>0000U6E9hRK9^#O7(mu>ETqrXGsduA8$)?`v2seloOCza43C{NQ$$gAOH**MCn0Q?+L7dl7qnbRdqZ8LSVp1ItDxhxD?t@5_yHg6A8yI zC*%Wgg22K|8E#!~cTNYR~@Y9KepMPrrB8cABapAFa=`H+UGhkXUZV1GnwR1*lPyZ;*K(i~2gp|@bzp8}og7e*#% zEnr|^CWdVV!-4*Y_7rFvlww2Ze+>j*!Z!pQ?2l->4q#nqRu9`ELo6RMS5=br47g_X zRw}P9a7RRYQ%2Vsd0Me{_(EggTnuN6j=-?uFS6j^u69elMypu?t>op*wBx<=Wx8?( ztpe^(fwM6jJX7M-l*k3kEpWOl_Vk3@(_w4oc}4YF4|Rt=2V^XU?#Yz`8(e?aZ@#li0n*=g^qOcVpd-Wbok=@b#Yw zqn8u9a)z>l(1kEaPYZ6hwubN6i<8QHgsu0oE) ziJ(p;Wxm>sf!K+cw>R-(^Y2_bahB+&KI9y^);#0qt}t-$C|Bo71lHi{_+lg#f%RFy z0um=e3$K3i6K{U_4K!EX?F&rExl^W|G8Z8;`5z-k}OGNZ0#WVb$WCpQu-_YsiqKP?BB# vzVHS-CTUF4Ozn5G+mq_~Qqto~ahA+K`|lyv3(-e}00000NkvXXu0mjfd`9t{ literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-29x29@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..d0ef06e7edb86cdfe0d15b4b0d98334a86163658 GIT binary patch literal 1716 zcmds$`#;kQ7{|XelZftyR5~xW7?MLxS4^|Hw3&P7^y)@A9Fj{Xm1~_CIV^XZ%SLBn zA;!r`GqGHg=7>xrB{?psZQs88ZaedDoagm^KF{a*>G|dJWRSe^I$DNW008I^+;Kjt z>9p3GNR^I;v>5_`+91i(*G;u5|L+Bu6M=(afLjtkya#yZ175|z$pU~>2#^Z_pCZ7o z1c6UNcv2B3?; zX%qdxCXQpdKRz=#b*q0P%b&o)5ZrNZt7$fiETSK_VaY=mb4GK`#~0K#~9^ 
zcY!`#Af+4h?UMR-gMKOmpuYeN5P*RKF!(tb`)oe0j2BH1l?=>y#S5pMqkx6i{*=V9JF%>N8`ewGhRE(|WohnD59R^$_36{4>S zDFlPC5|k?;SPsDo87!B{6*7eqmMdU|QZ84>6)Kd9wNfh90=y=TFQay-0__>=<4pk& zYDjgIhL-jQ9o>z32K)BgAH+HxamL{ZL~ozu)Qqe@a`FpH=oQRA8=L-m-1dam(Ix2V z?du;LdMO+ooBelr^_y4{|44tmgH^2hSzPFd;U^!1p>6d|o)(-01z{i&Kj@)z-yfWQ)V#3Uo!_U}q3u`(fOs`_f^ueFii1xBNUB z6MecwJN$CqV&vhc+)b(p4NzGGEgwWNs z@*lUV6LaduZH)4_g!cE<2G6#+hJrWd5(|p1Z;YJ7ifVHv+n49btR}dq?HHDjl{m$T z!jLZcGkb&XS2OG~u%&R$(X+Z`CWec%QKt>NGYvd5g20)PU(dOn^7%@6kQb}C(%=vr z{?RP(z~C9DPnL{q^@pVw@|Vx~@3v!9dCaBtbh2EdtoNHm4kGxp>i#ct)7p|$QJs+U z-a3qtcPvhihub?wnJqEt>zC@)2suY?%-96cYCm$Q8R%-8$PZYsx3~QOLMDf(piXMm zB=<63yQk1AdOz#-qsEDX>>c)EES%$owHKue;?B3)8aRd}m~_)>SL3h2(9X;|+2#7X z+#2)NpD%qJvCQ0a-uzZLmz*ms+l*N}w)3LRQ*6>|Ub-fyptY(keUxw+)jfwF5K{L9 z|Cl_w=`!l_o><384d&?)$6Nh(GAm=4p_;{qVn#hI8lqewW7~wUlyBM-4Z|)cZr?Rh z=xZ&Ol>4(CU85ea(CZ^aO@2N18K>ftl8>2MqetAR53_JA>Fal`^)1Y--Am~UDa4th zKfCYpcXky$XSFDWBMIl(q=Mxj$iMBX=|j9P)^fDmF(5(5$|?Cx}DKEJa&XZP%OyE`*GvvYQ4PV&!g2|L^Q z?YG}tx;sY@GzMmsY`7r$P+F_YLz)(e}% zyakqFB<6|x9R#TdoP{R$>o7y(-`$$p0NxJ6?2B8tH)4^yF(WhqGZlM3=9Ibs$%U1w zWzcss*_c0=v_+^bfb`kBFsI`d;ElwiU%frgRB%qBjn@!0U2zZehBn|{%uNIKBA7n= zzE`nnwTP85{g;8AkYxA68>#muXa!G>xH22D1I*SiD~7C?7Za+9y7j1SHiuSkKK*^O zsZ==KO(Ua#?YUpXl{ViynyT#Hzk=}5X$e04O@fsMQjb}EMuPWFO0e&8(2N(29$@Vd zn1h8Yd>6z(*p^E{c(L0Lg=wVdupg!z@WG;E0k|4a%s7Up5C0c)55XVK*|x9RQeZ1J@1v9MX;>n34(i>=YE@Iur`0Vah(inE3VUFZNqf~tSz{1fz3Fsn_x4F>o(Yo;kpqvBe-sbwH(*Y zu$JOl0b83zu$JMvy<#oH^Wl>aWL*?aDwnS0iEAwC?DK@aT)GHRLhnz2WCvf3Ba;o=aY7 z2{Asu5MEjGOY4O#Ggz@@J;q*0`kd2n8I3BeNuMmYZf{}pg=jTdTCrIIYuW~luKecn z+E-pHY%ohj@uS0%^ z&(OxwPFPD$+#~`H?fMvi9geVLci(`K?Kj|w{rZ9JgthFHV+=6vMbK~0)Ea<&WY-NC zy-PnZft_k2tfeQ*SuC=nUj4H%SQ&Y$gbH4#2sT0cU0SdFs=*W*4hKGpuR1{)mV;Qf5pw4? 
zfiQgy0w3fC*w&Bj#{&=7033qFR*<*61B4f9K%CQvxEn&bsWJ{&winp;FP!KBj=(P6 z4Z_n4L7cS;ao2)ax?Tm|I1pH|uLpDSRVghkA_UtFFuZ0b2#>!8;>-_0ELjQSD-DRd z4im;599VHDZYtnWZGAB25W-e(2VrzEh|etsv2YoP#VbIZ{aFkwPrzJ#JvCvA*mXS& z`}Q^v9(W4GiSs}#s7BaN!WA2bniM$0J(#;MR>uIJ^uvgD3GS^%*ikdW6-!VFUU?JV zZc2)4cMsX@j z5HQ^e3BUzOdm}yC-xA%SY``k$rbfk z;CHqifhU*jfGM@DkYCecD9vl*qr58l6x<8URB=&%{!Cu3RO*MrKZ4VO}V6R0a zZw3Eg^0iKWM1dcTYZ0>N899=r6?+adUiBKPciJw}L$=1f4cs^bio&cr9baLF>6#BM z(F}EXe-`F=f_@`A7+Q&|QaZ??Txp_dB#lg!NH=t3$G8&06MFhwR=Iu*Im0s_b2B@| znW>X}sy~m#EW)&6E&!*0%}8UAS)wjt+A(io#wGI@Z2S+Ms1Cxl%YVE800007ip7{`C_J2TxPmfw%h$|%acrYHt)Re^PB%O&&=~a zhS(%I#+V>J-vjIib^<+s%ludY7y^C(P8nmqn9fp!i+?vr`bziDE=bx`%2W#Xyrj|i z!XQ4v1%L`m{7KT7q+LZNB^h8Ha2e=`Wp65^0;J00)_^G=au=8Yo;1b`CV&@#=jIBo zjN^JNVfYSs)+kDdGe7`1&8!?MQYKS?DuHZf3iogk_%#9E|5S zWeHrmAo>P;ejX7mwq#*}W25m^ZI+{(Z8fI?4jM_fffY0nok=+88^|*_DwcW>mR#e+ zX$F_KMdb6sRz!~7KkyN0G(3XQ+;z3X%PZ4gh;n-%62U<*VUKNv(D&Q->Na@Xb&u5Q3`3DGf+a8O5x7c#7+R+EAYl@R5us)CIw z7sT@_y~Ao@uL#&^LIh&QceqiT^+lb0YbFZt_SHOtWA%mgPEKVNvVgCsXy{5+zl*X8 zCJe)Q@y>wH^>l4;h1l^Y*9%-23TSmE>q5nI@?mt%n;Sj4Qq`Z+ib)a*a^cJc%E9^J zB;4s+K@rARbcBLT5P=@r;IVnBMKvT*)ew*R;&8vu%?Z&S>s?8?)3*YawM0P4!q$Kv zMmKh3lgE~&w&v%wVzH3Oe=jeNT=n@Y6J6TdHWTjXfX~-=1A1Bw`EW8rn}MqeI34nh zexFeA?&C3B2(E?0{drE@DA2pu(A#ElY&6el60Rn|Qpn-FkfQ8M93AfWIr)drgDFEU zghdWK)^71EWCP(@(=c4kfH1Y(4iugD4fve6;nSUpLT%!)MUHs1!zJYy4y||C+SwQ! 
z)KM&$7_tyM`sljP2fz6&Z;jxRn{Wup8IOUx8D4uh&(=O zx-7$a;U><*5L^!%xRlw)vAbh;sdlR||& ze}8_8%)c2Fwy=F&H|LM+p{pZB5DKTx>Y?F1N%BlZkXf!}JeGuMZk~LPi7{cidvUGB zAJ4LVeNV%XO>LTrklB#^-;8nb;}6l;1oW&WS=Mz*Az!4cqqQzbOSFq`$Q%PfD7srM zpKgP-D_0XPTRX*hAqeq0TDkJ;5HB1%$3Np)99#16c{ zJImlNL(npL!W|Gr_kxl1GVmF5&^$^YherS7+~q$p zt}{a=*RiD2Ikv6o=IM1kgc7zqpaZ;OB)P!1zz*i3{U()Dq#jG)egvK}@uFLa`oyWZ zf~=MV)|yJn`M^$N%ul5);JuQvaU1r2wt(}J_Qgyy`qWQI`hEeRX0uC@c1(dQ2}=U$ tNIIaX+dr)NRWXcxoR{>fqI{SF_dm1Ylv~=3YHI)h002ovPDHLkV1g(pWS;;4 literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..c8f9ed8f5cee1c98386d13b17e89f719e83555b2 GIT binary patch literal 1895 zcmV-t2blPYP)FQtfgmafE#=YDCq`qUBt#QpG%*H6QHY765~R=q zZ6iudfM}q!Pz#~9JgOi8QJ|DSu?1-*(kSi1K4#~5?#|rh?sS)(-JQqX*}ciXJ56_H zdw=^s_srbAdqxlvGyrgGet#6T7_|j;95sL%MtM;q86vOxKM$f#puR)Bjv9Zvz9-di zXOTSsZkM83)E9PYBXC<$6(|>lNLVBb&&6y{NByFCp%6+^ALR@NCTse_wqvNmSWI-m z!$%KlHFH2omF!>#%1l3LTZg(s7eof$7*xB)ZQ0h?ejh?Ta9fDv59+u#MokW+1t8Zb zgHv%K(u9G^Lv`lh#f3<6!JVTL3(dCpxHbnbA;kKqQyd1~^Xe0VIaYBSWm6nsr;dFj z4;G-RyL?cYgsN1{L4ZFFNa;8)Rv0fM0C(~Tkit94 zz#~A)59?QjD&pAPSEQ)p8gP|DS{ng)j=2ux)_EzzJ773GmQ_Cic%3JJhC0t2cx>|v zJcVusIB!%F90{+}8hG3QU4KNeKmK%T>mN57NnCZ^56=0?&3@!j>a>B43pi{!u z7JyDj7`6d)qVp^R=%j>UIY6f+3`+qzIc!Y_=+uN^3BYV|o+$vGo-j-Wm<10%A=(Yk^beI{t%ld@yhKjq0iNjqN4XMGgQtbKubPM$JWBz}YA65k%dm*awtC^+f;a-x4+ddbH^7iDWGg&N0n#MW{kA|=8iMUiFYvMoDY@sPC#t$55gn6ykUTPAr`a@!(;np824>2xJthS z*ZdmT`g5-`BuJs`0LVhz+D9NNa3<=6m;cQLaF?tCv8)zcRSh66*Z|vXhG@$I%U~2l z?`Q zykI#*+rQ=z6Jm=Bui-SfpDYLA=|vzGE(dYm=OC8XM&MDo7ux4UF1~0J1+i%aCUpRe zt3L_uNyQ*cE(38Uy03H%I*)*Bh=Lb^Xj3?I^Hnbeq72(EOK^Y93CNp*uAA{5Lc=ky zx=~RKa4{iTm{_>_vSCm?$Ej=i6@=m%@VvAITnigVg{&@!7CDgs908761meDK5azA} z4?=NOH|PdvabgJ&fW2{Mo$Q0CcD8Qc84%{JPYt5EiG{MdLIAeX%T=D7NIP4%Hw}p9 
zg)==!2Lbp#j{u_}hMiao9=!VSyx0gHbeCS`;q&vzeq|fs`y&^X-lso(Ls@-706qmA z7u*T5PMo_w3{se1t2`zWeO^hOvTsohG_;>J0wVqVe+n)AbQCx)yh9;w+J6?NF5Lmo zecS@ieAKL8%bVd@+-KT{yI|S}O>pYckUFs;ry9Ow$CD@ztz5K-*D$^{i(_1llhSh^ zEkL$}tsQt5>QA^;QgjgIfBDmcOgi5YDyu?t6vSnbp=1+@6D& z5MJ}B8q;bRlVoxasyhcUF1+)o`&3r0colr}QJ3hcSdLu;9;td>kf@Tcn<@9sIx&=m z;AD;SCh95=&p;$r{Xz3iWCO^MX83AGJ(yH&eTXgv|0=34#-&WAmw{)U7OU9!Wz^!7 zZ%jZFi@JR;>Mhi7S>V7wQ176|FdW2m?&`qa(ScO^CFPR80HucLHOTy%5s*HR0^8)i h0WYBP*#0Ks^FNSabJA*5${_#%002ovPDHLkV1oKhTl@e3 literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-40x40@3x.png new file mode 100644 index 0000000000000000000000000000000000000000..a6d6b8609df07bf62e5100a53a01510388bd2b22 GIT binary patch literal 2665 zcmV-v3YPVWP)oFh3q0MFesq&64WThn3$;G69TfjsAv=f2G9}p zgSx99+!YV6qME!>9MD13x)k(+XE7W?_O4LoLb5ND8 zaV{9+P@>42xDfRiYBMSgD$0!vssptcb;&?u9u(LLBKmkZ>RMD=kvD3h`sk6!QYtBa ztlZI#nu$8lJ^q2Z79UTgZe>BU73(Aospiq+?SdMt8lDZ;*?@tyWVZVS_Q7S&*tJaiRlJ z+aSMOmbg3@h5}v;A*c8SbqM3icg-`Cnwl;7Ts%A1RkNIp+Txl-Ckkvg4oxrqGA5ewEgYqwtECD<_3Egu)xGllKt&J8g&+=ac@Jq4-?w6M3b*>w5 z69N3O%=I^6&UL5gZ!}trC7bUj*12xLdkNs~Bz4QdJJ*UDZox2UGR}SNg@lmOvhCc~ z*f_UeXv(=#I#*7>VZx2ObEN~UoGUTl=-@)E;YtCRZ>SVp$p9yG5hEFZ!`wI!spd)n zSk+vK0Vin7FL{7f&6OB%f;SH22dtbcF<|9fi2Fp%q4kxL!b1#l^)8dUwJ zwEf{(wJj@8iYDVnKB`eSU+;ml-t2`@%_)0jDM`+a46xhDbBj2+&Ih>1A>6aky#(-SYyE{R3f#y57wfLs z6w1p~$bp;6!9DX$M+J~S@D6vJAaElETnsX4h9a5tvPhC3L@qB~bOzkL@^z0k_hS{T4PF*TDrgdXp+dzsE? 
z>V|VR035Pl9n5&-RePFdS{7KAr2vPOqR9=M$vXA1Yy5>w;EsF`;OK{2pkn-kpp9Pw z)r;5JfJKKaT$4qCb{TaXHjb$QA{y0EYy*+b1XI;6Ah- zw13P)xT`>~eFoJC!>{2XL(a_#upp3gaR1#5+L(Jmzp4TBnx{~WHedpJ1ch8JFk~Sw z>F+gN+i+VD?gMXwcIhn8rz`>e>J^TI3E-MW>f}6R-pL}>WMOa0k#jN+`RyUVUC;#D zg|~oS^$6%wpF{^Qr+}X>0PKcr3Fc&>Z>uv@C);pwDs@2bZWhYP!rvGx?_|q{d`t<*XEb#=aOb=N+L@CVBGqImZf&+a zCQEa3$~@#kC);pasdG=f6tuIi0PO-y&tvX%>Mv=oY3U$nD zJ#gMegnQ46pq+3r=;zmgcG+zRc9D~c>z+jo9&D+`E6$LmyFqlmCYw;-Zooma{sR@~ z)_^|YL1&&@|GXo*pivH7k!msl+$Sew3%XJnxajt0K%3M6Bd&YFNy9}tWG^aovK2eX z1aL1%7;KRDrA@eG-Wr6w+;*H_VD~qLiVI`{_;>o)k`{8xa3EJT1O_>#iy_?va0eR? zDV=N%;Zjb%Z2s$@O>w@iqt!I}tLjGk!=p`D23I}N4Be@$(|iSA zf3Ih7b<{zqpDB4WF_5X1(peKe+rASze%u8eKLn#KKXt;UZ+Adf$_TO+vTqshLLJ5c z52HucO=lrNVae5XWOLm!V@n-ObU11!b+DN<$RuU+YsrBq*lYT;?AwJpmNKniF0Q1< zJCo>Q$=v$@&y=sj6{r!Y&y&`0$-I}S!H_~pI&2H8Z1C|BX4VgZ^-! zje3-;x0PBD!M`v*J_)rL^+$<1VJhH*2Fi~aA7s&@_rUHYJ9zD=M%4AFQ`}k8OC$9s XsPq=LnkwKG00000NkvXXu0mjfhAk5^ literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..a6d6b8609df07bf62e5100a53a01510388bd2b22 GIT binary patch literal 2665 zcmV-v3YPVWP)oFh3q0MFesq&64WThn3$;G69TfjsAv=f2G9}p zgSx99+!YV6qME!>9MD13x)k(+XE7W?_O4LoLb5ND8 zaV{9+P@>42xDfRiYBMSgD$0!vssptcb;&?u9u(LLBKmkZ>RMD=kvD3h`sk6!QYtBa ztlZI#nu$8lJ^q2Z79UTgZe>BU73(Aospiq+?SdMt8lDZ;*?@tyWVZVS_Q7S&*tJaiRlJ z+aSMOmbg3@h5}v;A*c8SbqM3icg-`Cnwl;7Ts%A1RkNIp+Txl-Ckkvg4oxrqGA5ewEgYqwtECD<_3Egu)xGllKt&J8g&+=ac@Jq4-?w6M3b*>w5 z69N3O%=I^6&UL5gZ!}trC7bUj*12xLdkNs~Bz4QdJJ*UDZox2UGR}SNg@lmOvhCc~ z*f_UeXv(=#I#*7>VZx2ObEN~UoGUTl=-@)E;YtCRZ>SVp$p9yG5hEFZ!`wI!spd)n zSk+vK0Vin7FL{7f&6OB%f;SH22dtbcF<|9fi2Fp%q4kxL!b1#l^)8dUwJ zwEf{(wJj@8iYDVnKB`eSU+;ml-t2`@%_)0jDM`+a46xhDbBj2+&Ih>1A>6aky#(-SYyE{R3f#y57wfLs z6w1p~$bp;6!9DX$M+J~S@D6vJAaElETnsX4h9a5tvPhC3L@qB~bOzkL@^z0k_hS{T4PF*TDrgdXp+dzsE? 
z>V|VR035Pl9n5&-RePFdS{7KAr2vPOqR9=M$vXA1Yy5>w;EsF`;OK{2pkn-kpp9Pw z)r;5JfJKKaT$4qCb{TaXHjb$QA{y0EYy*+b1XI;6Ah- zw13P)xT`>~eFoJC!>{2XL(a_#upp3gaR1#5+L(Jmzp4TBnx{~WHedpJ1ch8JFk~Sw z>F+gN+i+VD?gMXwcIhn8rz`>e>J^TI3E-MW>f}6R-pL}>WMOa0k#jN+`RyUVUC;#D zg|~oS^$6%wpF{^Qr+}X>0PKcr3Fc&>Z>uv@C);pwDs@2bZWhYP!rvGx?_|q{d`t<*XEb#=aOb=N+L@CVBGqImZf&+a zCQEa3$~@#kC);pasdG=f6tuIi0PO-y&tvX%>Mv=oY3U$nD zJ#gMegnQ46pq+3r=;zmgcG+zRc9D~c>z+jo9&D+`E6$LmyFqlmCYw;-Zooma{sR@~ z)_^|YL1&&@|GXo*pivH7k!msl+$Sew3%XJnxajt0K%3M6Bd&YFNy9}tWG^aovK2eX z1aL1%7;KRDrA@eG-Wr6w+;*H_VD~qLiVI`{_;>o)k`{8xa3EJT1O_>#iy_?va0eR? zDV=N%;Zjb%Z2s$@O>w@iqt!I}tLjGk!=p`D23I}N4Be@$(|iSA zf3Ih7b<{zqpDB4WF_5X1(peKe+rASze%u8eKLn#KKXt;UZ+Adf$_TO+vTqshLLJ5c z52HucO=lrNVae5XWOLm!V@n-ObU11!b+DN<$RuU+YsrBq*lYT;?AwJpmNKniF0Q1< zJCo>Q$=v$@&y=sj6{r!Y&y&`0$-I}S!H_~pI&2H8Z1C|BX4VgZ^-! zje3-;x0PBD!M`v*J_)rL^+$<1VJhH*2Fi~aA7s&@_rUHYJ9zD=M%4AFQ`}k8OC$9s XsPq=LnkwKG00000NkvXXu0mjfhAk5^ literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-60x60@3x.png new file mode 100644 index 0000000000000000000000000000000000000000..75b2d164a5a98e212cca15ea7bf2ab5de5108680 GIT binary patch literal 3831 zcmVjJBgitF5mAp-i>4+KS_oR{|13AP->1TD4=w)g|)JHOx|a2Wk1Va z!k)vP$UcQ#mdj%wNQoaJ!w>jv_6&JPyutpQps?s5dmDQ>`%?Bvj>o<%kYG!YW6H-z zu`g$@mp`;qDR!51QaS}|ZToSuAGcJ7$2HF0z`ln4t!#Yg46>;vGG9N9{V@9z#}6v* zfP?}r6b{*-C*)(S>NECI_E~{QYzN5SXRmVnP<=gzP+_Sp(Aza_hKlZ{C1D&l*(7IKXxQC1Z9#6wx}YrGcn~g%;icdw>T0Rf^w0{ z$_wn1J+C0@!jCV<%Go5LA45e{5gY9PvZp8uM$=1}XDI+9m7!A95L>q>>oe0$nC->i zeexUIvq%Uk<-$>DiDb?!In)lAmtuMWxvWlk`2>4lNuhSsjAf2*2tjT`y;@d}($o)S zn(+W&hJ1p0xy@oxP%AM15->wPLp{H!k)BdBD$toBpJh+crWdsNV)qsHaqLg2_s|Ih z`8E9z{E3sA!}5aKu?T!#enD(wLw?IT?k-yWVHZ8Akz4k5(TZJN^zZgm&zM28sfTD2BYJ|Fde3Xzh;;S` z=GXTnY4Xc)8nYoz6&vF;P7{xRF-{|2Xs5>a5)@BrnQ}I(_x7Cgpx#5&Td^4Q9_FnQ zX5so*;#8-J8#c$OlA&JyPp$LKUhC~-e~Ij!L%uSMu!-VZG7Hx-L{m2DVR2i=GR(_% 
zCVD!4N`I)&Q5S`?P&fQZ=4#Dgt_v2-DzkT}K(9gF0L(owe-Id$Rc2qZVLqI_M_DyO z9@LC#U28_LU{;wGZ&))}0R2P4MhajKCd^K#D+JJ&JIXZ_p#@+7J9A&P<0kdRujtQ_ zOy>3=C$kgi6$0pW06KaLz!21oOryKM3ZUOWqppndxfH}QpgjEJ`j7Tzn5bk6K&@RA?vl##y z$?V~1E(!wB5rH`>3nc&@)|#<1dN2cMzzm=PGhQ|Yppne(C-Vlt450IXc`J4R0W@I7 zd1e5uW6juvO%ni(WX7BsKx3MLngO7rHO;^R5I~0^nE^9^E_eYLgiR9&KnJ)pBbfno zSVnW$0R+&6jOOsZ82}nJ126+c|%svPo;TeUku<2G7%?$oft zyaO;tVo}(W)VsTUhq^XmFi#2z%-W9a{7mXn{uzivYQ_d6b7VJG{77naW(vHt-uhnY zVN#d!JTqVh(7r-lhtXVU6o})aZbDt_;&wJVGl2FKYFBFpU-#9U)z#(A%=IVnqytR$SY-sO( z($oNE09{D^@OuYPz&w~?9>Fl5`g9u&ecFGhqX=^#fmR=we0CJw+5xna*@oHnkahk+ z9aWeE3v|An+O5%?4fA&$Fgu~H_YmqR!yIU!bFCk4!#pAj%(lI(A5n)n@Id#M)O9Yx zJU9oKy{sRAIV3=5>(s8n{8ryJ!;ho}%pn6hZKTKbqk=&m=f*UnK$zW3YQP*)pw$O* zIfLA^!-bmBl6%d_n$#tP8Zd_(XdA*z*WH|E_yILwjtI~;jK#v-6jMl^?<%Y%`gvpwv&cFb$||^v4D&V=aNy?NGo620jL3VZnA%s zH~I|qPzB~e(;p;b^gJr7Ure#7?8%F0m4vzzPy^^(q4q1OdthF}Fi*RmVZN1OwTsAP zn9CZP`FazX3^kG(KodIZ=Kty8DLTy--UKfa1$6XugS zk%6v$Kmxt6U!YMx0JQ)0qX*{CXwZZk$vEROidEc7=J-1;peNat!vS<3P-FT5po>iE z!l3R+<`#x|+_hw!HjQGV=8!q|76y8L7N8gP3$%0kfush|u0uU^?dKBaeRSBUpOZ0c z62;D&Mdn2}N}xHRFTRI?zRv=>=AjHgH}`2k4WK=#AHB)UFrR-J87GgX*x5fL^W2#d z=(%K8-oZfMO=i{aWRDg=FX}UubM4eotRDcn;OR#{3q=*?3mE3_oJ-~prjhxh%PgQT zyn)Qozaq0@o&|LEgS{Ind4Swsr;b`u185hZPOBLL<`d2%^Yp1?oL)=jnLi;Zo0ZDliTtQ^b5SmfIMe{T==zZkbvn$KTQGlbG8w}s@M3TZnde;1Am46P3juKb zl9GU&3F=q`>j!`?SyH#r@O59%@aMX^rx}Nxe<>NqpUp5=lX1ojGDIR*-D^SDuvCKF z?3$xG(gVUsBERef_YjPFl^rU9EtD{pt z0CXwpN7BN3!8>hajGaTVk-wl=9rxmfWtIhC{mheHgStLi^+Nz12a?4r(fz)?3A%at zMlvQmL<2-R)-@G1wJ0^zQK%mR=r4d{Y3fHp){nWXUL#|CqXl(+v+qDh>FkF9`eWrW zfr^D%LNfOcTNvtx0JXR35J0~Jpi2#P3Q&80w+nqNfc}&G0A~*)lGHKv=^FE+b(37|)zL;KLF>oiGfb(?&1 zV3XRu!Sw>@quKiab%g6jun#oZ%!>V#A%+lNc?q>6+VvyAn=kf_6z^(TZUa4Eelh{{ zqFX-#dY(EV@7l$NE&kv9u9BR8&Ojd#ZGJ6l8_BW}^r?DIS_rU2(XaGOK z225E@kH5Opf+CgD^{y29jD4gHbGf{1MD6ggQ&%>UG4WyPh5q_tb`{@_34B?xfSO*| zZv8!)q;^o-bz`MuxXk*G^}(6)ACb@=Lfs`Hxoh>`Y0NE8QRQ!*p|SH@{r8=%RKd4p z+#Ty^-0kb=-H-O`nAA3_6>2z(D=~Tbs(n8LHxD0`R0_ATFqp-SdY3(bZ3;VUM?J=O 
zKCNsxsgt@|&nKMC=*+ZqmLHhX1KHbAJs{nGVMs6~TiF%Q)P@>!koa$%oS zjXa=!5>P`vC-a}ln!uH1ooeI&v?=?v7?1n~P(wZ~0>xWxd_Aw;+}9#eULM7M8&E?Y zC-ZLhi3RoM92SXUb-5i-Lmt5_rfjE{6y^+24`y$1lywLyHO!)Boa7438K4#iLe?rh z2O~YGSgFUBH?og*6=r9rme=peP~ah`(8Zt7V)j5!V0KPFf_mebo3z95U8(up$-+EA^9dTRLq>Yl)YMBuch9%=e5B`Vnb>o zt03=kq;k2TgGe4|lGne&zJa~h(UGutjP_zr?a7~#b)@15XNA>Dj(m=gg2Q5V4-$)D|Q9}R#002ovPDHLkV1o7DH3k3x literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/AppIcon.appiconset/Icon-App-76x76@1x.png new file mode 100644 index 0000000000000000000000000000000000000000..c4df70d39da7941ef3f6dcb7f06a192d8dcb308d GIT binary patch literal 1888 zcmV-m2cP(fP)x~L`~4d)Rspd&<9kFh{hn*KP1LP0~$;u(LfAu zp%fx&qLBcRHx$G|3q(bv@+b;o0*D|jwD-Q9uQR(l*ST}s+uPgQ-MeFwZ#GS?b332? z&Tk$&_miXn3IGq)AmQ)3sisq{raD4(k*bHvpCe-TdWq^NRTEVM)i9xbgQ&ccnUVx* zEY%vS%gDcSg=!tuIK8$Th2_((_h^+7;R|G{n06&O2#6%LK`a}n?h_fL18btz<@lFG za}xS}u?#DBMB> zw^b($1Z)`9G?eP95EKi&$eOy@K%h;ryrR3la%;>|o*>CgB(s>dDcNOXg}CK9SPmD? 
zmr-s{0wRmxUnbDrYfRvnZ@d z6johZ2sMX{YkGSKWd}m|@V7`Degt-43=2M?+jR%8{(H$&MLLmS;-|JxnX2pnz;el1jsvqQz}pGSF<`mqEXRQ5sC4#BbwnB_4` zc5bFE-Gb#JV3tox9fp-vVEN{(tOCpRse`S+@)?%pz+zVJXSooTrNCUg`R6`hxwb{) zC@{O6MKY8tfZ5@!yy=p5Y|#+myRL=^{tc(6YgAnkg3I(Cd!r5l;|;l-MQ8B`;*SCE z{u)uP^C$lOPM z5d~UhKhRRmvv{LIa^|oavk1$QiEApSrP@~Jjbg`<*dW4TO?4qG%a%sTPUFz(QtW5( zM)lA+5)0TvH~aBaOAs|}?u2FO;yc-CZ1gNM1dAxJ?%m?YsGR`}-xk2*dxC}r5j$d* zE!#Vtbo69h>V4V`BL%_&$} z+oJAo@jQ^Tk`;%xw-4G>hhb&)B?##U+(6Fi7nno`C<|#PVA%$Y{}N-?(Gc$1%tr4Pc}}hm~yY#fTOe!@v9s-ik$dX~|ygArPhByaXn8 zpI^FUjNWMsTFKTP3X7m?UK)3m zp6rI^_zxRYrx6_QmhoWoDR`fp4R7gu6;gdO)!KexaoO2D88F9x#TM1(9Bn7g;|?|o z)~$n&Lh#hCP6_LOPD>a)NmhW})LADx2kq=X7}7wYRj-0?dXr&bHaRWCfSqvzFa=sn z-8^gSyn-RmH=BZ{AJZ~!8n5621GbUJV7Qvs%JNv&$%Q17s_X%s-41vAPfIR>;x0Wlqr5?09S>x#%Qkt>?(&XjFRY}*L6BeQ3 z<6XEBh^S7>AbwGm@XP{RkeEKj6@_o%oV?hDuUpUJ+r#JZO?!IUc;r0R?>mi)*ZpQ) z#((dn=A#i_&EQn|hd)N$#A*fjBFuiHcYvo?@y1 z5|fV=a^a~d!c-%ZbMNqkMKiSzM{Yq=7_c&1H!mXk60Uv32dV;vMg&-kQ)Q{+PFtwc zj|-uQ;b^gts??J*9VxxOro}W~Q9j4Em|zSRv)(WSO9$F$s=Ydu%Q+5DOid~lwk&we zY%W(Z@ofdwPHncEZzZgmqS|!gTj3wQq9rxQy+^eNYKr1mj&?tm@wkO*9@UtnRMG>c aR{jt9+;fr}hV%pg00001^@s67{VYS000c7NklQEG_j zup^)eW&WUIApqy$=APz8jE@awGp)!bsTjDbrJO`$x^ZR^dr;>)LW>{ zs70vpsD38v)19rI=GNk1b(0?Js9~rjsQsu*K;@SD40RB-3^gKU-MYC7G!Bw{fZsqp zih4iIi;Hr_xZ033Iu{sQxLS=}yBXgLMn40d++>aQ0#%8D1EbGZp7+ z5=mK?t31BkVYbGOxE9`i748x`YgCMwL$qMsChbSGSE1`p{nSmadR zcQ#R)(?!~dmtD0+D2!K zR9%!Xp1oOJzm(vbLvT^$IKp@+W2=-}qTzTgVtQ!#Y7Gxz}stUIm<1;oBQ^Sh2X{F4ibaOOx;5ZGSNK z0maF^@(UtV$=p6DXLgRURwF95C=|U8?osGhgOED*b z7woJ_PWXBD>V-NjQAm{~T%sjyJ{5tn2f{G%?J!KRSrrGvQ1(^`YLA5B!~eycY(e5_ z*%aa{at13SxC(=7JT7$IQF~R3sy`Nn%EMv!$-8ZEAryB*yB1k&stni)=)8-ODo41g zkJu~roIgAih94tb=YsL%iH5@^b~kU9M-=aqgXIrbtxMpFy5mekFm#edF9z7RQ6V}R zBIhbXs~pMzt0VWy1Fi$^fh+1xxLDoK09&5&MJl(q#THjPm(0=z2H2Yfm^a&E)V+a5 zbi>08u;bJsDRUKR9(INSc7XyuWv(JsD+BB*0hS)FO&l&7MdViuur@-<-EHw>kHRGY zqoT}3fDv2-m{NhBG8X}+rgOEZ;amh*DqN?jEfQdqxdj08`Sr=C-KmT)qU1 z+9Cl)a1mgXxhQiHVB}l`m;-RpmKy?0*|yl?FXvJkFxuu!fKlcmz$kN(a}i*saM3nr 
z0!;a~_%Xqy24IxA2rz<+08=B-Q|2PT)O4;EaxP^6qixOv7-cRh?*T?zZU`{nIM-at zTKYWr9rJ=tppQ9I#Z#mLgINVB!pO-^FOcvFw6NhV0gztuO?g ztoA*C-52Q-Z-P#xB4HAY3KQVd%dz1S4PA3vHp0aa=zAO?FCt zC_GaTyVBg2F!bBr3U@Zy2iJgIAt>1sf$JWA9kh{;L+P*HfUBX1Zy{4MgNbDfBV_ly z!y#+753arsZUt@366jIC0klaC@ckuk!qu=pAyf7&QmiBUT^L1&tOHzsK)4n|pmrVT zs2($4=?s~VejTFHbFdDOwG;_58LkIj1Fh@{glkO#F1>a==ymJS$z;gdedT1zPx4Kj ztjS`y_C}%af-RtpehdQDt3a<=W5C4$)9W@QAse;WUry$WYmr51ml9lkeunUrE`-3e zmq1SgSOPNEE-Mf+AGJ$g0M;3@w!$Ej;hMh=v=I+Lpz^n%Pg^MgwyqOkNyu2c^of)C z1~ALor3}}+RiF*K4+4{(1%1j3pif1>sv0r^mTZ?5Jd-It!tfPfiG_p$AY*Vfak%FG z4z#;wLtw&E&?}w+eKG^=#jF7HQzr8rV0mY<1YAJ_uGz~$E13p?F^fPSzXSn$8UcI$ z8er9{5w5iv0qf8%70zV71T1IBB1N}R5Kp%NO0=5wJalZt8;xYp;b{1K) zHY>2wW-`Sl{=NpR%iu3(u6l&)rc%%cSA#aV7WCowfbFR4wcc{LQZv~o1u_`}EJA3>ki`?9CKYTA!rhO)if*zRdd}Kn zEPfYbhoVE~!FI_2YbC5qAj1kq;xP6%J8+?2PAs?`V3}nyFVD#sV3+uP`pi}{$l9U^ zSz}_M9f7RgnnRhaoIJgT8us!1aB&4!*vYF07Hp&}L zCRlop0oK4DL@ISz{2_BPlezc;xj2|I z23RlDNpi9LgTG_#(w%cMaS)%N`e>~1&a3<{Xy}>?WbF>OOLuO+j&hc^YohQ$4F&ze z+hwnro1puQjnKm;vFG~o>`kCeUIlkA-2tI?WBKCFLMBY=J{hpSsQ=PDtU$=duS_hq zHpymHt^uuV1q@uc4bFb{MdG*|VoW@15Osrqt2@8ll0qO=j*uOXn{M0UJX#SUztui9FN4)K3{9!y8PC-AHHvpVTU;x|-7P+taAtyglk#rjlH2 z5Gq8ik}BPaGiM{#Woyg;*&N9R2{J0V+WGB69cEtH7F?U~Kbi6ksi*`CFXsi931q7Y zGO82?whBhN%w1iDetv%~wM*Y;E^)@Vl?VDj-f*RX>{;o_=$fU!&KAXbuadYZ46Zbg z&6jMF=49$uL^73y;;N5jaHYv)BTyfh&`qVLYn?`o6BCA_z-0niZz=qPG!vonK3MW_ zo$V96zM!+kJRs{P-5-rQVse0VBH*n6A58)4uc&gfHMa{gIhV2fGf{st>E8sKyP-$8zp~wJX^A*@DI&-;8>gANXZj zU)R+Y)PB?=)a|Kj>8NXEu^S_h^7R`~Q&7*Kn!xyvzVv&^>?^iu;S~R2e-2fJx-oUb cX)(b1KSk$MOV07*qoM6N<$f&6$jw%VRuvdN2+38CZWny1cRtlsl+0_KtW)EU14Ei(F!UtWuj4IK+3{sK@>rh zs1Z;=(DD&U6+tlyL?UnHVN^&g6QhFi2#HS+*qz;(>63G(`|jRtW|nz$Pv7qTovP!^ zP_jES{mr@O-02w%!^a?^1ZP!_KmQiz0L~jZ=W@Qt`8wzOoclQsAS<5YdH;a(4bGLE zk8s}1If(PSIgVi!XE!5kA?~z*sobvNyohr;=Q_@h2@$6Flyej3J)D-6YfheRGl`HEcPk|~huT_2-U?PfL=4BPV)f1o!%rQ!NMt_MYw-5bUSwQ9Z&zC>u zOrl~UJglJNa%f50Ok}?WB{on`Ci`p^Y!xBA?m@rcJXLxtrE0FhRF3d*ir>yzO|BD$ z3V}HpFcCh6bTzY}Nt_(W%QYd3NG)jJ4<`F<1Od) 
zfQblTdC&h2lCz`>y?>|9o2CdvC8qZeIZt%jN;B7Hdn2l*k4M4MFEtq`q_#5?}c$b$pf_3y{Y!cRDafZBEj-*OD|gz#PBDeu3QoueOesLzB+O zxjf2wvf6Wwz>@AiOo2mO4=TkAV+g~%_n&R;)l#!cBxjuoD$aS-`IIJv7cdX%2{WT7 zOm%5rs(wqyPE^k5SIpUZ!&Lq4<~%{*>_Hu$2|~Xa;iX*tz8~G6O3uFOS?+)tWtdi| zV2b#;zRN!m@H&jd=!$7YY6_}|=!IU@=SjvGDFtL;aCtw06U;-v^0%k0FOyESt z1Wv$={b_H&8FiRV?MrzoHWd>%v6KTRU;-v^Miiz+@q`(BoT!+<37CKhoKb)|8!+RG z6BQFU^@fRW;s8!mOf2QViKQGk0TVER6EG1`#;Nm39Do^PoT!+<37AD!%oJe86(=et zZ~|sLzU>V-qYiU6V8$0GmU7_K8|Fd0B?+9Un1BhKAz#V~Fk^`mJtlCX#{^8^M8!me z8Yg;8-~>!e<-iG;h*0B1kBKm}hItVGY6WnjVpgnTTAC$rqQ^v)4KvOtpY|sIj@WYg zyw##ZZ5AC2IKNC;^hwg9BPk0wLStlmBr;E|$5GoAo$&Ui_;S9WY62n3)i49|T%C#i017z3J=$RF|KyZWnci*@lW4 z=AKhNN6+m`Q!V3Ye68|8y@%=am>YD0nG99M)NWc20%)gwO!96j7muR}Fr&54SxKP2 zP30S~lt=a*qDlbu3+Av57=9v&vr<6g0&`!8E2fq>I|EJGKs}t|{h7+KT@)LfIV-3K zK)r_fr2?}FFyn*MYoLC>oV-J~eavL2ho4a4^r{E-8m2hi>~hA?_vIG4a*KT;2eyl1 zh_hUvUJpNCFwBvRq5BI*srSle>c6%n`#VNsyC|MGa{(P&08p=C9+WUw9Hl<1o9T4M zdD=_C0F7#o8A_bRR?sFNmU0R6tW`ElnF8p53IdHo#S9(JoZCz}fHwJ6F<&?qrpVqE zte|m%89JQD+XwaPU#%#lVs-@-OL);|MdfINd6!XwP2h(eyafTUsoRkA%&@fe?9m@jw-v(yTTiV2(*fthQH9}SqmsRPVnwwbV$1E(_lkmo&S zF-truCU914_$jpqjr(>Ha4HkM4YMT>m~NosUu&UZ>zirfHo%N6PPs9^_o$WqPA0#5 z%tG>qFCL+b*0s?sZ;Sht0nE7Kl>OVXy=gjWxxK;OJ3yGd7-pZf7JYNcZo2*1SF`u6 zHJyRRxGw9mDlOiXqVMsNe#WX`fC`vrtjSQ%KmLcl(lC>ZOQzG^%iql2w-f_K@r?OE zwCICifM#L-HJyc7Gm>Ern?+Sk3&|Khmu4(~3qa$(m6Ub^U0E5RHq49za|XklN#?kP zl;EstdW?(_4D>kwjWy2f!LM)y?F94kyU3`W!6+AyId-89v}sXJpuic^NLL7GJItl~ zsiuB98AI-(#Mnm|=A-R6&2fwJ0JVSY#Q>&3$zFh|@;#%0qeF=j5Ajq@4i0tIIW z&}sk$&fGwoJpe&u-JeGLi^r?dO`m=y(QO{@h zQqAC7$rvz&5+mo3IqE?h=a~6m>%r5Quapvzq;{y~p zJpyXOBgD9VrW7@#p6l7O?o3feml(DtSL>D^R) zZUY%T2b0-vBAFN7VB;M88!~HuOXi4KcI6aRQ&h|XQ0A?m%j2=l1f0cGP}h(oVfJ`N zz#PpmFC*ieab)zJK<4?^k=g%OjPnkANzbAbmGZHoVRk*mTfm75s_cWVa`l*f$B@xu z5E*?&@seIo#*Y~1rBm!7sF9~~u6Wrj5oICUOuz}CS)jdNIznfzCA(stJ(7$c^e5wN z?lt>eYgbA!kvAR7zYSD&*r1$b|(@;9dcZ^67R0 zXAXJKa|5Sdmj!g578Nwt6d$sXuc&MWezA0Whd`94$h{{?1IwXP4)Tx4obDK%xoFZ_Z 
zjjHJ_P@R_e5blG@yEjnaJb`l;s%Lb2&=8$&Ct-fV`E^4CUs)=jTk!I}2d&n!f@)bm z@ z_4Dc86+3l2*p|~;o-Sb~oXb_RuLmoifDU^&Te$*FevycC0*nE3Xws8gsWp|Rj2>SM zns)qcYj?^2sd8?N!_w~4v+f-HCF|a$TNZDoNl$I1Uq87euoNgKb6&r26TNrfkUa@o zfdiFA@p{K&mH3b8i!lcoz)V{n8Q@g(vR4ns4r6w;K z>1~ecQR0-<^J|Ndg5fvVUM9g;lbu-){#ghGw(fg>L zh)T5Ljb%lWE;V9L!;Cqk>AV1(rULYF07ZBJbGb9qbSoLAd;in9{)95YqX$J43-dY7YU*k~vrM25 zxh5_IqO0LYZW%oxQ5HOzmk4x{atE*vipUk}sh88$b2tn?!ujEHn`tQLe&vo}nMb&{ zio`xzZ&GG6&ZyN3jnaQy#iVqXE9VT(3tWY$n-)uWDQ|tc{`?fq2F`oQ{;d3aWPg4Hp-(iE{ry>MIPWL> iW8Zci7-kcv6Uzs@r-FtIZ-&5|)J Q1PU{Fy85}Sb4q9e0B4a5jsO4v literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..9da19eacad3b03bb08bbddbbf4ac48dd78b3d838 GIT binary patch literal 68 zcmeAS@N?(olHy`uVBq!ia0vp^j3CUx0wlM}@Gt=>Zci7-kcv6Uzs@r-FtIZ-&5|)J Q1PU{Fy85}Sb4q9e0B4a5jsO4v literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/LaunchImage@3x.png new file mode 100644 index 0000000000000000000000000000000000000000..9da19eacad3b03bb08bbddbbf4ac48dd78b3d838 GIT binary patch literal 68 zcmeAS@N?(olHy`uVBq!ia0vp^j3CUx0wlM}@Gt=>Zci7-kcv6Uzs@r-FtIZ-&5|)J Q1PU{Fy85}Sb4q9e0B4a5jsO4v literal 0 HcmV?d00001 diff --git a/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md new file mode 100644 index 00000000..89c2725b --- /dev/null +++ b/speech_to_text/example/ios/Runner/Assets.xcassets/LaunchImage.imageset/README.md @@ -0,0 +1,5 @@ +# Launch Screen Assets + +You can customize the launch screen with your own desired assets by replacing the image files in this directory. 
+ +You can also do it by opening your Flutter project's Xcode project with `open ios/Runner.xcworkspace`, selecting `Runner/Assets.xcassets` in the Project Navigator and dropping in the desired images. \ No newline at end of file diff --git a/speech_to_text/example/ios/Runner/Base.lproj/LaunchScreen.storyboard b/speech_to_text/example/ios/Runner/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 00000000..f2e259c7 --- /dev/null +++ b/speech_to_text/example/ios/Runner/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/ios/Runner/Base.lproj/Main.storyboard b/speech_to_text/example/ios/Runner/Base.lproj/Main.storyboard new file mode 100644 index 00000000..f3c28516 --- /dev/null +++ b/speech_to_text/example/ios/Runner/Base.lproj/Main.storyboard @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/speech_to_text/example/ios/Runner/Info.plist b/speech_to_text/example/ios/Runner/Info.plist new file mode 100644 index 00000000..a69c0fce --- /dev/null +++ b/speech_to_text/example/ios/Runner/Info.plist @@ -0,0 +1,49 @@ + + + + + NSMicrophoneUsageDescription + This example listens for speech on the device microphone on your request. + NSSpeechRecognitionUsageDescription + This example recognizes words as you speak them and displays them. + CFBundleDevelopmentRegion + $(DEVELOPMENT_LANGUAGE) + CFBundleExecutable + $(EXECUTABLE_NAME) + CFBundleIdentifier + $(PRODUCT_BUNDLE_IDENTIFIER) + CFBundleInfoDictionaryVersion + 6.0 + CFBundleName + speech_to_text_example + CFBundlePackageType + APPL + CFBundleShortVersionString + $(FLUTTER_BUILD_NAME) + CFBundleSignature + ???? 
+ CFBundleVersion + $(FLUTTER_BUILD_NUMBER) + LSRequiresIPhoneOS + + UILaunchStoryboardName + LaunchScreen + UIMainStoryboardFile + Main + UISupportedInterfaceOrientations + + UIInterfaceOrientationPortrait + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UISupportedInterfaceOrientations~ipad + + UIInterfaceOrientationPortrait + UIInterfaceOrientationPortraitUpsideDown + UIInterfaceOrientationLandscapeLeft + UIInterfaceOrientationLandscapeRight + + UIViewControllerBasedStatusBarAppearance + + + diff --git a/speech_to_text/example/ios/Runner/Runner-Bridging-Header.h b/speech_to_text/example/ios/Runner/Runner-Bridging-Header.h new file mode 100644 index 00000000..7335fdf9 --- /dev/null +++ b/speech_to_text/example/ios/Runner/Runner-Bridging-Header.h @@ -0,0 +1 @@ +#import "GeneratedPluginRegistrant.h" \ No newline at end of file diff --git a/speech_to_text/example/lib/main.dart b/speech_to_text/example/lib/main.dart new file mode 100644 index 00000000..0115b828 --- /dev/null +++ b/speech_to_text/example/lib/main.dart @@ -0,0 +1,275 @@ +import 'dart:async'; +import 'dart:math'; + +import 'package:flutter/material.dart'; +import 'package:permission_handler/permission_handler.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; + +void main() => runApp(MyApp()); + +class MyApp extends StatefulWidget { + @override + _MyAppState createState() => _MyAppState(); +} + +class _MyAppState extends State { + bool _hasSpeech = false; + double level = 0.0; + double minSoundLevel = 50000; + double maxSoundLevel = -50000; + String lastWords = ""; + String lastError = ""; + String lastStatus = ""; + String _currentLocaleId = ""; + List _localeNames = []; + final SpeechToText speech = SpeechToText(); + + @override + void initState() { + requestPermissions(); + super.initState(); + } + + Future initSpeechState() async { 
+ bool hasSpeech = await speech.initialize( + onError: errorListener, onStatus: statusListener); + if (hasSpeech) { + _localeNames = await speech.locales(); + + var systemLocale = await speech.systemLocale(); + _currentLocaleId = systemLocale.localeId; + } + + if (!mounted) return; + + setState(() { + _hasSpeech = hasSpeech; + }); + } + + void requestPermissions() async{ + Map statuses = await [ + Permission.microphone, + ].request(); + } + + @override + Widget build(BuildContext context) { + return MaterialApp( + home: Scaffold( + appBar: AppBar( + title: const Text('Speech to Text CloudSolution'), + ), + body: Column(children: [ + Center( + child: Text( + 'Speech recognition available', + style: TextStyle(fontSize: 22.0), + ), + ), + Container( + child: Column( + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + FlatButton( + child: Text('Initialize'), + onPressed: _hasSpeech ? null : initSpeechState, + ), + ], + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + FlatButton( + child: Text('Start'), + onPressed: !_hasSpeech || speech.isListening + ? null + : startListening, + ), + FlatButton( + child: Text('Stop'), + onPressed: speech.isListening ? stopListening : null, + ), + FlatButton( + child: Text('Cancel'), + onPressed: speech.isListening ? 
cancelListening : null, + ), + ], + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceAround, + children: [ + DropdownButton( + onChanged: (selectedVal) => _switchLang(selectedVal), + value: _currentLocaleId, + items: _localeNames + .map( + (localeName) => DropdownMenuItem( + value: localeName.localeId, + child: Text(localeName.name), + ), + ) + .toList(), + ), + ], + ) + ], + ), + ), + Expanded( + flex: 4, + child: Column( + children: [ + Center( + child: Text( + 'Recognized Words', + style: TextStyle(fontSize: 22.0), + ), + ), + Expanded( + child: Stack( + children: [ + Container( + color: Theme.of(context).selectedRowColor, + child: Center( + child: Text( + lastWords, + textAlign: TextAlign.center, + ), + ), + ), + Positioned.fill( + bottom: 10, + child: Align( + alignment: Alignment.bottomCenter, + child: Container( + width: 40, + height: 40, + alignment: Alignment.center, + decoration: BoxDecoration( + boxShadow: [ + BoxShadow( + blurRadius: .26, + spreadRadius: level * 1.5, + color: Colors.black.withOpacity(.05)) + ], + color: Colors.white, + borderRadius: + BorderRadius.all(Radius.circular(50)), + ), + child: IconButton(icon: Icon(Icons.mic)), + ), + ), + ), + ], + ), + ), + ], + ), + ), + Expanded( + flex: 1, + child: Column( + children: [ + Center( + child: Text( + 'Error Status', + style: TextStyle(fontSize: 22.0), + ), + ), + Center( + child: Text(lastError), + ), + ], + ), + ), + Container( + padding: EdgeInsets.symmetric(vertical: 20), + color: Theme.of(context).backgroundColor, + child: Center( + child: speech.isListening + ? 
Text( + "I'm listening...", + style: TextStyle(fontWeight: FontWeight.bold), + ) + : Text( + 'Not listening', + style: TextStyle(fontWeight: FontWeight.bold), + ), + ), + ), + ]), + ), + ); + } + + void startListening() { + lastWords = ""; + lastError = ""; + speech.listen( + onResult: resultListener, + listenFor: Duration(seconds: 10), + localeId: _currentLocaleId, + onSoundLevelChange: soundLevelListener, + cancelOnError: true, + partialResults: true, + onDevice: true, + listenMode: ListenMode.confirmation); + setState(() {}); + } + + void stopListening() { + speech.stop(); + setState(() { + level = 0.0; + }); + } + + void cancelListening() { + speech.cancel(); + setState(() { + level = 0.0; + }); + } + + void resultListener(SpeechRecognitionResult result) { + setState(() { + lastWords = "${result.recognizedWords} - ${result.finalResult}"; + }); + } + + void soundLevelListener(double level) { + minSoundLevel = min(minSoundLevel, level); + maxSoundLevel = max(maxSoundLevel, level); + // print("sound level $level: $minSoundLevel - $maxSoundLevel "); + setState(() { + this.level = level; + }); + } + + void errorListener(SpeechRecognitionError error) { + // print("Received error status: $error, listening: ${speech.isListening}"); + setState(() { + lastError = "${error.errorMsg} - ${error.permanent}"; + }); + } + + void statusListener(String status) { + // print( + // "Received listener status: $status, listening: ${speech.isListening}"); + setState(() { + lastStatus = "$status"; + }); + } + + _switchLang(selectedVal) { + setState(() { + _currentLocaleId = selectedVal; + }); + print(selectedVal); + } +} diff --git a/speech_to_text/example/pubspec.lock b/speech_to_text/example/pubspec.lock new file mode 100644 index 00000000..e0e9b753 --- /dev/null +++ b/speech_to_text/example/pubspec.lock @@ -0,0 +1,245 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + archive: + dependency: transitive + description: + name: archive + url: 
"https://pub.dartlang.org" + source: hosted + version: "2.0.13" + args: + dependency: transitive + description: + name: args + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.0" + async: + dependency: transitive + description: + name: async + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.1" + boolean_selector: + dependency: transitive + description: + name: boolean_selector + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + charcode: + dependency: transitive + description: + name: charcode + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.3" + clock: + dependency: transitive + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" + collection: + dependency: transitive + description: + name: collection + url: "https://pub.dartlang.org" + source: hosted + version: "1.14.12" + convert: + dependency: transitive + description: + name: convert + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + crypto: + dependency: transitive + description: + name: crypto + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.4" + cupertino_icons: + dependency: "direct main" + description: + name: cupertino_icons + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.3" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_test: + dependency: "direct dev" + description: flutter + source: sdk + version: "0.0.0" + image: + dependency: transitive + description: + name: image + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.12" + json_annotation: + dependency: transitive + description: + name: json_annotation + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + matcher: + dependency: transitive + description: + name: matcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.12.6" + meta: + dependency: transitive + description: + 
name: meta + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.8" + nested: + dependency: transitive + description: + name: nested + url: "https://pub.dartlang.org" + source: hosted + version: "0.0.4" + path: + dependency: transitive + description: + name: path + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.4" + permission_handler: + dependency: "direct main" + description: + name: permission_handler + url: "https://pub.dartlang.org" + source: hosted + version: "5.0.1+1" + permission_handler_platform_interface: + dependency: transitive + description: + name: permission_handler_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.1" + petitparser: + dependency: transitive + description: + name: petitparser + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.0" + plugin_platform_interface: + dependency: transitive + description: + name: plugin_platform_interface + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.2" + provider: + dependency: "direct main" + description: + name: provider + url: "https://pub.dartlang.org" + source: hosted + version: "4.3.1" + quiver: + dependency: transitive + description: + name: quiver + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.3" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_span: + dependency: transitive + description: + name: source_span + url: "https://pub.dartlang.org" + source: hosted + version: "1.7.0" + speech_to_text: + dependency: "direct dev" + description: + path: ".." 
+ relative: true + source: path + version: "0.0.0" + stack_trace: + dependency: transitive + description: + name: stack_trace + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.3" + stream_channel: + dependency: transitive + description: + name: stream_channel + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + string_scanner: + dependency: transitive + description: + name: string_scanner + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.5" + term_glyph: + dependency: transitive + description: + name: term_glyph + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + test_api: + dependency: transitive + description: + name: test_api + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.15" + typed_data: + dependency: transitive + description: + name: typed_data + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.6" + vector_math: + dependency: transitive + description: + name: vector_math + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.8" + xml: + dependency: transitive + description: + name: xml + url: "https://pub.dartlang.org" + source: hosted + version: "3.6.1" +sdks: + dart: ">=2.7.0 <3.0.0" + flutter: ">=1.16.0 <2.0.0" diff --git a/speech_to_text/example/pubspec.yaml b/speech_to_text/example/pubspec.yaml new file mode 100644 index 00000000..d2bfcff7 --- /dev/null +++ b/speech_to_text/example/pubspec.yaml @@ -0,0 +1,33 @@ +name: speech_to_text_example +description: Demonstrates how to use the speech_to_text plugin. +version: 1.1.0 +publish_to: 'none' + +environment: + sdk: ">=2.1.0 <3.0.0" + +dependencies: + flutter: + sdk: flutter + + cupertino_icons: ^0.1.2 + permission_handler: ^5.0.1+1 + + provider: + +dev_dependencies: + flutter_test: + sdk: flutter + + speech_to_text: + path: ../ + +# The following section is specific to Flutter. 
+flutter: + + uses-material-design: true + + assets: + - assets/sounds/speech_to_text_listening.m4r + - assets/sounds/speech_to_text_cancel.m4r + - assets/sounds/speech_to_text_stop.m4r diff --git a/speech_to_text/example/test/widget_test.dart b/speech_to_text/example/test/widget_test.dart new file mode 100644 index 00000000..639a52fb --- /dev/null +++ b/speech_to_text/example/test/widget_test.dart @@ -0,0 +1,27 @@ +// This is a basic Flutter widget test. +// +// To perform an interaction with a widget in your test, use the WidgetTester +// utility that Flutter provides. For example, you can send tap and scroll +// gestures. You can also use WidgetTester to find child widgets in the widget +// tree, read text, and verify that the values of widget properties are correct. + +import 'package:flutter/material.dart'; +import 'package:flutter_test/flutter_test.dart'; + +import '../lib/main.dart'; + +void main() { + testWidgets('Verify Platform version', (WidgetTester tester) async { + // Build our app and trigger a frame. + await tester.pumpWidget(MyApp()); + + // Verify that platform version is retrieved. + expect( + find.byWidgetPredicate( + (Widget widget) => + widget is Text && widget.data.startsWith('Running on:'), + ), + findsOneWidget, + ); + }); +} diff --git a/speech_to_text/ios/.gitignore b/speech_to_text/ios/.gitignore new file mode 100644 index 00000000..aa479fd3 --- /dev/null +++ b/speech_to_text/ios/.gitignore @@ -0,0 +1,37 @@ +.idea/ +.vagrant/ +.sconsign.dblite +.svn/ + +.DS_Store +*.swp +profile + +DerivedData/ +build/ +GeneratedPluginRegistrant.h +GeneratedPluginRegistrant.m + +.generated/ + +*.pbxuser +*.mode1v3 +*.mode2v3 +*.perspectivev3 + +!default.pbxuser +!default.mode1v3 +!default.mode2v3 +!default.perspectivev3 + +xcuserdata + +*.moved-aside + +*.pyc +*sync/ +Icon? 
+.tags* + +/Flutter/Generated.xcconfig +/Flutter/flutter_export_environment.sh \ No newline at end of file diff --git a/speech_to_text/ios/Assets/.gitkeep b/speech_to_text/ios/Assets/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/speech_to_text/ios/Classes/SpeechToTextPlugin.h b/speech_to_text/ios/Classes/SpeechToTextPlugin.h new file mode 100644 index 00000000..1785eb8f --- /dev/null +++ b/speech_to_text/ios/Classes/SpeechToTextPlugin.h @@ -0,0 +1,4 @@ +#import + +@interface SpeechToTextPlugin : NSObject +@end diff --git a/speech_to_text/ios/Classes/SpeechToTextPlugin.m b/speech_to_text/ios/Classes/SpeechToTextPlugin.m new file mode 100644 index 00000000..20d0327d --- /dev/null +++ b/speech_to_text/ios/Classes/SpeechToTextPlugin.m @@ -0,0 +1,8 @@ +#import "SpeechToTextPlugin.h" +#import + +@implementation SpeechToTextPlugin ++ (void)registerWithRegistrar:(NSObject*)registrar { + [SwiftSpeechToTextPlugin registerWithRegistrar:registrar]; +} +@end diff --git a/speech_to_text/ios/Classes/SwiftSpeechToTextPlugin.swift b/speech_to_text/ios/Classes/SwiftSpeechToTextPlugin.swift new file mode 100644 index 00000000..68687967 --- /dev/null +++ b/speech_to_text/ios/Classes/SwiftSpeechToTextPlugin.swift @@ -0,0 +1,580 @@ +import Flutter +import UIKit +import Speech +import os.log +import Try + +public enum SwiftSpeechToTextMethods: String { + case has_permission + case initialize + case listen + case stop + case cancel + case locales + case unknown // just for testing +} + +public enum SwiftSpeechToTextCallbackMethods: String { + case textRecognition + case notifyStatus + case notifyError + case soundLevelChange +} + +public enum SpeechToTextStatus: String { + case listening + case notListening + case unavailable + case available +} + +public enum SpeechToTextErrors: String { + case onDeviceError + case noRecognizerError + case listenFailedError + case missingOrInvalidArg +} + +public enum ListenMode: Int { + case deviceDefault = 0 + case dictation = 1 + 
case search = 2 + case confirmation = 3 +} + +struct SpeechRecognitionWords : Codable { + let recognizedWords: String + let confidence: Decimal +} + +struct SpeechRecognitionResult : Codable { + let alternates: [SpeechRecognitionWords] + let finalResult: Bool +} + +struct SpeechRecognitionError : Codable { + let errorMsg: String + let permanent: Bool +} + +enum SpeechToTextError: Error { + case runtimeError(String) +} + + +@available(iOS 10.0, *) +public class SwiftSpeechToTextPlugin: NSObject, FlutterPlugin { + private var channel: FlutterMethodChannel + private var registrar: FlutterPluginRegistrar + private var recognizer: SFSpeechRecognizer? + private var currentRequest: SFSpeechAudioBufferRecognitionRequest? + private var currentTask: SFSpeechRecognitionTask? + private var listeningSound: AVAudioPlayer? + private var successSound: AVAudioPlayer? + private var cancelSound: AVAudioPlayer? + private var rememberedAudioCategory: AVAudioSession.Category? + private var previousLocale: Locale? + private var onPlayEnd: (() -> Void)? + private var returnPartialResults: Bool = true + private var failedListen: Bool = false + private var listening = false + private let audioSession = AVAudioSession.sharedInstance() + private let audioEngine = AVAudioEngine() + private let jsonEncoder = JSONEncoder() + private let busForNodeTap = 0 + private let speechBufferSize: AVAudioFrameCount = 1024 + private static var subsystem = Bundle.main.bundleIdentifier! 
+ private let pluginLog = OSLog(subsystem: "com.csdcorp.speechToText", category: "plugin") + + public static func register(with registrar: FlutterPluginRegistrar) { + let channel = FlutterMethodChannel(name: "plugin.csdcorp.com/speech_to_text", binaryMessenger: registrar.messenger()) + let instance = SwiftSpeechToTextPlugin( channel, registrar: registrar ) + registrar.addMethodCallDelegate(instance, channel: channel ) + } + + init( _ channel: FlutterMethodChannel, registrar: FlutterPluginRegistrar ) { + self.channel = channel + self.registrar = registrar + } + + public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) { + switch call.method { + case SwiftSpeechToTextMethods.has_permission.rawValue: + hasPermission( result ) + case SwiftSpeechToTextMethods.initialize.rawValue: + initialize( result ) + case SwiftSpeechToTextMethods.listen.rawValue: + guard let argsArr = call.arguments as? Dictionary, + let partialResults = argsArr["partialResults"] as? Bool, let onDevice = argsArr["onDevice"] as? Bool, let listenModeIndex = argsArr["listenMode"] as? Int + else { + DispatchQueue.main.async { + result(FlutterError( code: SpeechToTextErrors.missingOrInvalidArg.rawValue, + message:"Missing arg partialResults, onDevice, and listenMode are required", + details: nil )) + } + return + } + var localeStr: String? = nil + if let localeParam = argsArr["localeId"] as? 
String { + localeStr = localeParam + } + guard let listenMode = ListenMode(rawValue: listenModeIndex) else { + DispatchQueue.main.async { + result(FlutterError( code: SpeechToTextErrors.missingOrInvalidArg.rawValue, + message:"invalid value for listenMode, must be 0-2, was \(listenModeIndex)", + details: nil )) + } + return + } + + listenForSpeech( result, localeStr: localeStr, partialResults: partialResults, onDevice: onDevice, listenMode: listenMode ) + case SwiftSpeechToTextMethods.stop.rawValue: + stopSpeech( result ) + case SwiftSpeechToTextMethods.cancel.rawValue: + cancelSpeech( result ) + case SwiftSpeechToTextMethods.locales.rawValue: + locales( result ) + default: + os_log("Unrecognized method: %{PUBLIC}@", log: pluginLog, type: .error, call.method) + DispatchQueue.main.async { + result( FlutterMethodNotImplemented) + } + } + } + + private func hasPermission( _ result: @escaping FlutterResult) { + let has = SFSpeechRecognizer.authorizationStatus() == SFSpeechRecognizerAuthorizationStatus.authorized && + AVAudioSession.sharedInstance().recordPermission == AVAudioSession.RecordPermission.granted + DispatchQueue.main.async { + result( has ) + } + } + + private func initialize( _ result: @escaping FlutterResult) { + var success = false + let status = SFSpeechRecognizer.authorizationStatus() + switch status { + case SFSpeechRecognizerAuthorizationStatus.notDetermined: + SFSpeechRecognizer.requestAuthorization({(status)->Void in + success = status == SFSpeechRecognizerAuthorizationStatus.authorized + if ( success ) { + AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in + if granted { + self.setupSpeechRecognition(result) + } else{ + self.sendBoolResult( false, result ); + os_log("User denied permission", log: self.pluginLog, type: .info) + } + }) + } + else { + self.sendBoolResult( false, result ); + } + }); + case SFSpeechRecognizerAuthorizationStatus.denied: + os_log("Permission permanently denied", log: self.pluginLog, type: 
.info) + sendBoolResult( false, result ); + case SFSpeechRecognizerAuthorizationStatus.restricted: + os_log("Device restriction prevented initialize", log: self.pluginLog, type: .info) + sendBoolResult( false, result ); + default: + os_log("Has permissions continuing with setup", log: self.pluginLog, type: .debug) + setupSpeechRecognition(result) + } + } + + fileprivate func sendBoolResult( _ value: Bool, _ result: @escaping FlutterResult) { + DispatchQueue.main.async { + result( value ) + } + } + + fileprivate func setupListeningSound() { + listeningSound = loadSound("assets/sounds/speech_to_text_listening.m4r") + successSound = loadSound("assets/sounds/speech_to_text_stop.m4r") + cancelSound = loadSound("assets/sounds/speech_to_text_cancel.m4r") + } + + fileprivate func loadSound( _ assetPath: String ) -> AVAudioPlayer? { + var player: AVAudioPlayer? = nil + let soundKey = registrar.lookupKey(forAsset: assetPath ) + guard !soundKey.isEmpty else { + return player + } + if let soundPath = Bundle.main.path(forResource: soundKey, ofType:nil) { + let soundUrl = URL(fileURLWithPath: soundPath ) + do { + player = try AVAudioPlayer(contentsOf: soundUrl ) + player?.delegate = self + } catch { + // no audio + } + } + return player + } + + private func setupSpeechRecognition( _ result: @escaping FlutterResult) { + setupRecognizerForLocale( locale: Locale.current ) + guard recognizer != nil else { + sendBoolResult( false, result ); + return + } + recognizer?.delegate = self + setupListeningSound() + + sendBoolResult( true, result ); + } + + private func setupRecognizerForLocale( locale: Locale ) { + if ( previousLocale == locale ) { + return + } + previousLocale = locale + recognizer = SFSpeechRecognizer( locale: locale ) + } + + private func getLocale( _ localeStr: String? 
) -> Locale { + guard let aLocaleStr = localeStr else { + return Locale.current + } + let locale = Locale(identifier: aLocaleStr) + return locale + } + + private func stopSpeech( _ result: @escaping FlutterResult) { + if ( !listening ) { + sendBoolResult( false, result ); + return + } + stopAllPlayers() + if let sound = successSound { + onPlayEnd = {() -> Void in + self.currentTask?.finish() + self.stopCurrentListen( ) + self.sendBoolResult( true, result ) + return + } + sound.play() + } + else { + stopCurrentListen( ) + sendBoolResult( true, result ); + } + } + + private func cancelSpeech( _ result: @escaping FlutterResult) { + if ( !listening ) { + sendBoolResult( false, result ); + return + } + stopAllPlayers() + if let sound = cancelSound { + onPlayEnd = {() -> Void in + self.currentTask?.cancel() + self.stopCurrentListen( ) + self.sendBoolResult( true, result ) + return + } + sound.play() + } + else { + self.currentTask?.cancel() + stopCurrentListen( ) + sendBoolResult( true, result ); + } + } + + private func stopAllPlayers() { + cancelSound?.stop() + successSound?.stop() + listeningSound?.stop() + } + + private func stopCurrentListen( ) { + stopAllPlayers() + currentRequest?.endAudio() + + do { + try trap { + self.audioEngine.stop() + } + } + catch { + os_log("Error stopping engine: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + } + do { + try trap { + let inputNode = self.audioEngine.inputNode + inputNode.removeTap(onBus: self.busForNodeTap); + } + } + catch { + os_log("Error removing trap: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + } + do { + if let rememberedAudioCategory = rememberedAudioCategory { + try self.audioSession.setCategory(rememberedAudioCategory) + } + } + catch { + os_log("Error stopping listen: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + } + do { + try self.audioSession.setActive(false, options: .notifyOthersOnDeactivation) + } + catch { + os_log("Error 
deactivation: %{PUBLIC}@", log: pluginLog, type: .info, error.localizedDescription) + } + currentRequest = nil + currentTask = nil + onPlayEnd = nil + listening = false + } + + private func listenForSpeech( _ result: @escaping FlutterResult, localeStr: String?, partialResults: Bool, onDevice: Bool, listenMode: ListenMode ) { + if ( nil != currentTask || listening ) { + sendBoolResult( false, result ); + return + } + do { + // let inErrorTest = true + failedListen = false + returnPartialResults = partialResults + setupRecognizerForLocale(locale: getLocale(localeStr)) + guard let localRecognizer = recognizer else { + result(FlutterError( code: SpeechToTextErrors.noRecognizerError.rawValue, + message:"Failed to create speech recognizer", + details: nil )) + return + } + if ( onDevice ) { + if #available(iOS 13.0, *), !localRecognizer.supportsOnDeviceRecognition { + result(FlutterError( code: SpeechToTextErrors.onDeviceError.rawValue, + message:"on device recognition is not supported on this device", + details: nil )) + } + } + rememberedAudioCategory = self.audioSession.category + try self.audioSession.setCategory(AVAudioSession.Category.playAndRecord, options: .defaultToSpeaker) + // try self.audioSession.setMode(AVAudioSession.Mode.measurement) + try self.audioSession.setMode(AVAudioSession.Mode.default) + try self.audioSession.setActive(true, options: .notifyOthersOnDeactivation) + if let sound = listeningSound { + self.onPlayEnd = {()->Void in + if ( !self.failedListen ) { + self.listening = true + self.invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.listening.rawValue ) + + } + } + sound.play() + } + self.audioEngine.reset(); + let inputNode = self.audioEngine.inputNode + if(inputNode.inputFormat(forBus: 0).channelCount == 0){ + throw SpeechToTextError.runtimeError("Not enough available inputs.") + } + self.currentRequest = SFSpeechAudioBufferRecognitionRequest() + guard let currentRequest = self.currentRequest else { + 
sendBoolResult( false, result ); + return + } + currentRequest.shouldReportPartialResults = true + if #available(iOS 13.0, *), onDevice { + currentRequest.requiresOnDeviceRecognition = true + } + switch listenMode { + case ListenMode.dictation: + currentRequest.taskHint = SFSpeechRecognitionTaskHint.dictation + break + case ListenMode.search: + currentRequest.taskHint = SFSpeechRecognitionTaskHint.search + break + case ListenMode.confirmation: + currentRequest.taskHint = SFSpeechRecognitionTaskHint.confirmation + break + default: + break + } + self.currentTask = self.recognizer?.recognitionTask(with: currentRequest, delegate: self ) + let recordingFormat = inputNode.outputFormat(forBus: self.busForNodeTap) + try trap { + inputNode.installTap(onBus: self.busForNodeTap, bufferSize: self.speechBufferSize, format: recordingFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in + currentRequest.append(buffer) + self.updateSoundLevel( buffer: buffer ) + } + } + // if ( inErrorTest ){ + // throw SpeechToTextError.runtimeError("for testing only") + // } + self.audioEngine.prepare() + try self.audioEngine.start() + if nil == listeningSound { + listening = true + self.invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.listening.rawValue ) + } + sendBoolResult( true, result ); + } + catch { + failedListen = true + os_log("Error starting listen: %{PUBLIC}@", log: pluginLog, type: .error, error.localizedDescription) + stopCurrentListen() + sendBoolResult( false, result ); + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue ) + let speechError = SpeechRecognitionError(errorMsg: "error_listen_failed", permanent: true ) + do { + let errorResult = try jsonEncoder.encode(speechError) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyError, arguments: String( data:errorResult, encoding: .utf8) ) + } catch { + os_log("Could not encode JSON", log: pluginLog, type: .error) 
+ } + } + } + + private func updateSoundLevel( buffer: AVAudioPCMBuffer) { + guard + let channelData = buffer.floatChannelData + else { + return + } + + let channelDataValue = channelData.pointee + let channelDataValueArray = stride(from: 0, + to: Int(buffer.frameLength), + by: buffer.stride).map{ channelDataValue[$0] } + let frameLength = Float(buffer.frameLength) + let rms = sqrt(channelDataValueArray.map{ $0 * $0 }.reduce(0, +) / frameLength ) + let avgPower = 20 * log10(rms) + self.invokeFlutter( SwiftSpeechToTextCallbackMethods.soundLevelChange, arguments: avgPower ) + } + + /// Build a list of localId:name with the current locale first + private func locales( _ result: @escaping FlutterResult ) { + var localeNames = [String](); + let locales = SFSpeechRecognizer.supportedLocales(); + let currentLocale = Locale.current + if let idName = buildIdNameForLocale(forIdentifier: currentLocale.identifier ) { + localeNames.append(idName) + } + for locale in locales { + if ( locale.identifier == currentLocale.identifier) { + continue + } + if let idName = buildIdNameForLocale(forIdentifier: locale.identifier ) { + localeNames.append(idName) + } + } + DispatchQueue.main.async { + result(localeNames) + } + } + + private func buildIdNameForLocale( forIdentifier: String ) -> String? { + var idName: String? 
+ if let name = Locale.current.localizedString(forIdentifier: forIdentifier ) { + let sanitizedName = name.replacingOccurrences(of: ":", with: " ") + idName = "\(forIdentifier):\(sanitizedName)" + } + return idName + } + + private func handleResult( _ transcriptions: [SFTranscription], isFinal: Bool ) { + if ( !isFinal && !returnPartialResults ) { + return + } + var speechWords: [SpeechRecognitionWords] = [] + for transcription in transcriptions { + let words: SpeechRecognitionWords = SpeechRecognitionWords(recognizedWords: transcription.formattedString, confidence: confidenceIn( transcription)) + speechWords.append( words ) + } + let speechInfo = SpeechRecognitionResult(alternates: speechWords, finalResult: isFinal ) + do { + let speechMsg = try jsonEncoder.encode(speechInfo) + if let speechStr = String( data:speechMsg, encoding: .utf8) { + os_log("Encoded JSON result: %{PUBLIC}@", log: pluginLog, type: .debug, speechStr ) + invokeFlutter( SwiftSpeechToTextCallbackMethods.textRecognition, arguments: speechStr ) + } + } catch { + os_log("Could not encode JSON", log: pluginLog, type: .error) + } + } + + private func confidenceIn( _ transcription: SFTranscription ) -> Decimal { + guard ( transcription.segments.count > 0 ) else { + return 0; + } + var totalConfidence: Float = 0.0; + for segment in transcription.segments { + totalConfidence += segment.confidence + } + let avgConfidence: Float = totalConfidence / Float(transcription.segments.count ) + let confidence: Float = (avgConfidence * 1000).rounded() / 1000 + return Decimal( string: String( describing: confidence ) )! + } + + private func invokeFlutter( _ method: SwiftSpeechToTextCallbackMethods, arguments: Any? 
) { + DispatchQueue.main.async { + self.channel.invokeMethod( method.rawValue, arguments: arguments ) + } + } + +} + +@available(iOS 10.0, *) +extension SwiftSpeechToTextPlugin : SFSpeechRecognizerDelegate { + public func speechRecognizer(_ speechRecognizer: SFSpeechRecognizer, availabilityDidChange available: Bool) { + let availability = available ? SpeechToTextStatus.available.rawValue : SpeechToTextStatus.unavailable.rawValue + os_log("Availability changed: %{PUBLIC}@", log: pluginLog, type: .debug, availability) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: availability ) + } +} + +@available(iOS 10.0, *) +extension SwiftSpeechToTextPlugin : SFSpeechRecognitionTaskDelegate { + public func speechRecognitionDidDetectSpeech(_ task: SFSpeechRecognitionTask) { + // Do nothing for now + } + + public func speechRecognitionTaskFinishedReadingAudio(_ task: SFSpeechRecognitionTask) { + reportError(source: "FinishedReadingAudio", error: task.error) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue ) + } + + public func speechRecognitionTaskWasCancelled(_ task: SFSpeechRecognitionTask) { + reportError(source: "TaskWasCancelled", error: task.error) + invokeFlutter( SwiftSpeechToTextCallbackMethods.notifyStatus, arguments: SpeechToTextStatus.notListening.rawValue ) + } + + public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishSuccessfully successfully: Bool) { + reportError(source: "FinishSuccessfully", error: task.error) + stopCurrentListen( ) + } + + public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didHypothesizeTranscription transcription: SFTranscription) { + reportError(source: "HypothesizeTranscription", error: task.error) + handleResult( [transcription], isFinal: false ) + } + + public func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishRecognition recognitionResult: SFSpeechRecognitionResult) { + reportError(source: 
"FinishRecognition", error: task.error) + let isFinal = recognitionResult.isFinal + handleResult( recognitionResult.transcriptions, isFinal: isFinal ) + } + + private func reportError( source: String, error: Error?) { + if ( nil != error) { + os_log("%{PUBLIC}@ with error: %{PUBLIC}@", log: pluginLog, type: .debug, source, error.debugDescription) + } + } +} + +@available(iOS 10.0, *) +extension SwiftSpeechToTextPlugin : AVAudioPlayerDelegate { + + public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, + successfully flag: Bool) { + if let playEnd = self.onPlayEnd { + playEnd() + } + } +} diff --git a/speech_to_text/ios/speech_to_text.podspec b/speech_to_text/ios/speech_to_text.podspec new file mode 100644 index 00000000..1db79aa0 --- /dev/null +++ b/speech_to_text/ios/speech_to_text.podspec @@ -0,0 +1,22 @@ +# +# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html +# +Pod::Spec.new do |s| + s.name = 'speech_to_text' + s.version = '0.0.1' + s.summary = 'A new flutter plugin project.' + s.description = <<-DESC +A new flutter plugin project. + DESC + s.homepage = 'http://example.com' + s.license = { :file => '../LICENSE' } + s.author = { 'Your Company' => 'email@example.com' } + s.source = { :path => '.' } + s.source_files = 'Classes/**/*' + s.public_header_files = 'Classes/**/*.h' + s.dependency 'Flutter' + s.dependency 'Try' + + s.ios.deployment_target = '8.0' +end + diff --git a/speech_to_text/lib/speech_recognition_error.dart b/speech_to_text/lib/speech_recognition_error.dart new file mode 100644 index 00000000..2ab6cd4d --- /dev/null +++ b/speech_to_text/lib/speech_recognition_error.dart @@ -0,0 +1,44 @@ +import 'package:json_annotation/json_annotation.dart'; + +part 'speech_recognition_error.g.dart'; + +/// A single error returned from the underlying speech services. +/// +/// Errors are either transient or permanent. 
Permanent errors +/// block speech recognition from continuing and must be +/// addressed before recognition will work. Transient errors +/// cause individual recognition sessions to fail but subsequent +/// attempts may well succeed. +@JsonSerializable() +class SpeechRecognitionError { + /// Use this to differentiate the various error conditions. + /// + /// Not meant for display to the user. + final String errorMsg; + + /// True means that recognition cannot continue until + /// the error is resolved. + final bool permanent; + + SpeechRecognitionError(this.errorMsg, this.permanent); + + factory SpeechRecognitionError.fromJson(Map json) => + _$SpeechRecognitionErrorFromJson(json); + Map toJson() => _$SpeechRecognitionErrorToJson(this); + + @override + String toString() { + return "SpeechRecognitionError msg: $errorMsg, permanent: $permanent"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionError && + errorMsg == other.errorMsg && + permanent == other.permanent; + } + + @override + int get hashCode => errorMsg.hashCode; +} diff --git a/speech_to_text/lib/speech_recognition_error.g.dart b/speech_to_text/lib/speech_recognition_error.g.dart new file mode 100644 index 00000000..65299f6d --- /dev/null +++ b/speech_to_text/lib/speech_recognition_error.g.dart @@ -0,0 +1,22 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'speech_recognition_error.dart'; + +// ************************************************************************** +// JsonSerializableGenerator +// ************************************************************************** + +SpeechRecognitionError _$SpeechRecognitionErrorFromJson( + Map json) { + return SpeechRecognitionError( + json['errorMsg'] as String, + json['permanent'] as bool, + ); +} + +Map _$SpeechRecognitionErrorToJson( + SpeechRecognitionError instance) => + { + 'errorMsg': instance.errorMsg, + 'permanent': instance.permanent, + }; diff --git 
a/speech_to_text/lib/speech_recognition_event.dart b/speech_to_text/lib/speech_recognition_event.dart new file mode 100644 index 00000000..71729365 --- /dev/null +++ b/speech_to_text/lib/speech_recognition_event.dart @@ -0,0 +1,30 @@ +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +enum SpeechRecognitionEventType { + finalRecognitionEvent, + partialRecognitionEvent, + errorEvent, + statusChangeEvent, + soundLevelChangeEvent, +} + +/// A single event in a stream of speech recognition events. +/// +/// Use [eventType] to determine what type of event it is and depending on that +/// use the other properties to get information about it. +class SpeechRecognitionEvent { + final SpeechRecognitionEventType eventType; + final SpeechRecognitionError _error; + final SpeechRecognitionResult _result; + final bool _listening; + final double _level; + + SpeechRecognitionEvent( + this.eventType, this._result, this._error, this._listening, this._level); + + bool get isListening => _listening; + double get level => _level; + SpeechRecognitionResult get recognitionResult => _result; + SpeechRecognitionError get error => _error; +} diff --git a/speech_to_text/lib/speech_recognition_result.dart b/speech_to_text/lib/speech_recognition_result.dart new file mode 100644 index 00000000..38509f65 --- /dev/null +++ b/speech_to_text/lib/speech_recognition_result.dart @@ -0,0 +1,140 @@ +import 'dart:collection'; + +import 'package:json_annotation/json_annotation.dart'; + +part 'speech_recognition_result.g.dart'; + +/// A sequence of recognized words from the speech recognition +/// service. +/// +/// Depending on the platform behaviour the words may come in all +/// at once at the end or as partial results as each word is +/// recognized. Use the [finalResult] flag to determine if the +/// result is considered final by the platform. 
+@JsonSerializable(explicitToJson: true) +class SpeechRecognitionResult { + List _alternates; + + /// Returns a list of possible transcriptions of the speech. + /// + /// The first value is always the same as the [recognizedWords] + /// value. Use the confidence for each alternate transcription + /// to determine how likely it is. Note that not all platforms + /// do a good job with confidence, there are convenience methods + /// on [SpeechRecognitionWords] to work with possibly missing + /// confidence values. + List get alternates => + UnmodifiableListView(_alternates); + + /// The sequence of words that is the best transcription of + /// what was said. + /// + /// This is the same as the first value of [alternates]. + String get recognizedWords => + _alternates.isNotEmpty ? _alternates.first.recognizedWords : ""; + + /// False means the words are an interim result, true means + /// they are the final recognition. + final bool finalResult; + + /// The confidence that the [recognizedWords] are correct. + /// + /// Confidence is expressed as a value between 0 and 1. -1 + /// means that the confidence value was not available. + double get confidence => + _alternates.isNotEmpty ? _alternates.first.confidence : 0; + + /// true if there is confidence in this recognition, false otherwise. + /// + /// There are two separate ways for there to be confidence, the first + /// is if the confidence is missing, which is indicated by a value of + /// -1. The second is if the confidence is greater than or equal + /// [threshold]. If [threshold] is not provided it defaults to 0.8. + bool isConfident( + {double threshold = SpeechRecognitionWords.confidenceThreshold}) => + _alternates.isNotEmpty + ? _alternates.first.isConfident(threshold: threshold) + : false; + + /// true if [confidence] is not the [missingConfidence] value, false + /// otherwise. + bool get hasConfidenceRating => + _alternates.isNotEmpty ? 
_alternates.first.hasConfidenceRating : false; + + SpeechRecognitionResult(this._alternates, this.finalResult); + + @override + String toString() { + return "SpeechRecognitionResult words: $_alternates, final: $finalResult"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionResult && + recognizedWords == other.recognizedWords && + finalResult == other.finalResult; + } + + @override + int get hashCode => recognizedWords.hashCode; + + factory SpeechRecognitionResult.fromJson(Map json) => + _$SpeechRecognitionResultFromJson(json); + Map toJson() => _$SpeechRecognitionResultToJson(this); +} + +/// A set of words recognized in a [SpeechRecognitionResult]. +/// +/// Each result will have one or more [SpeechRecognitionWords] +/// with a varying degree of confidence about each set of words. +@JsonSerializable() +class SpeechRecognitionWords { + /// The sequence of words recognized + final String recognizedWords; + + /// The confidence that the [recognizedWords] are correct. + /// + /// Confidence is expressed as a value between 0 and 1. 0 + /// means that the confidence value was not available. Use + /// [isConfident] which will ignore 0 values automatically. + final double confidence; + + static const double confidenceThreshold = 0.8; + static const double missingConfidence = -1; + + const SpeechRecognitionWords(this.recognizedWords, this.confidence); + + /// true if there is confidence in this recognition, false otherwise. + /// + /// There are two separate ways for there to be confidence, the first + /// is if the confidence is missing, which is indicated by a value of + /// -1. The second is if the confidence is greater than or equal + /// [threshold]. If [threshold] is not provided it defaults to 0.8. 
+ bool isConfident({double threshold = confidenceThreshold}) => + confidence == missingConfidence || confidence >= threshold; + + /// true if [confidence] is not the [missingConfidence] value, false + /// otherwise. + bool get hasConfidenceRating => confidence != missingConfidence; + + @override + String toString() { + return "SpeechRecognitionWords words: $recognizedWords, confidence: $confidence"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionWords && + recognizedWords == other.recognizedWords && + confidence == other.confidence; + } + + @override + int get hashCode => recognizedWords.hashCode; + + factory SpeechRecognitionWords.fromJson(Map json) => + _$SpeechRecognitionWordsFromJson(json); + Map toJson() => _$SpeechRecognitionWordsToJson(this); +} diff --git a/speech_to_text/lib/speech_recognition_result.g.dart b/speech_to_text/lib/speech_recognition_result.g.dart new file mode 100644 index 00000000..023e5485 --- /dev/null +++ b/speech_to_text/lib/speech_recognition_result.g.dart @@ -0,0 +1,41 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'speech_recognition_result.dart'; + +// ************************************************************************** +// JsonSerializableGenerator +// ************************************************************************** + +SpeechRecognitionResult _$SpeechRecognitionResultFromJson( + Map json) { + return SpeechRecognitionResult( + (json['alternates'] as List) + ?.map((e) => e == null + ? 
+ null + : SpeechRecognitionWords.fromJson(e as Map)) + ?.toList(), + json['finalResult'] as bool, + ); +} + +Map _$SpeechRecognitionResultToJson( + SpeechRecognitionResult instance) => + { + 'alternates': instance.alternates?.map((e) => e?.toJson())?.toList(), + 'finalResult': instance.finalResult, + }; + +SpeechRecognitionWords _$SpeechRecognitionWordsFromJson( + Map json) { + return SpeechRecognitionWords( + json['recognizedWords'] as String, + (json['confidence'] as num)?.toDouble(), + ); +} + +Map _$SpeechRecognitionWordsToJson( + SpeechRecognitionWords instance) => + { + 'recognizedWords': instance.recognizedWords, + 'confidence': instance.confidence, + }; diff --git a/speech_to_text/lib/speech_to_text.dart b/speech_to_text/lib/speech_to_text.dart new file mode 100644 index 00000000..343706e6 --- /dev/null +++ b/speech_to_text/lib/speech_to_text.dart @@ -0,0 +1,511 @@ +import 'dart:async'; +import 'dart:convert'; +import 'dart:math'; + +import 'package:clock/clock.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +enum ListenMode { + deviceDefault, + dictation, + search, + confirmation, +} + +/// Notified as words are recognized with the current set of recognized words. +/// +/// See the [onResult] argument on the [listen] method for use. +typedef SpeechResultListener = void Function(SpeechRecognitionResult result); + +/// Notified if errors occur during recognition or initialization. 
+/// +/// Possible errors per the Android docs are described here: +/// https://developer.android.com/reference/android/speech/SpeechRecognizer +/// "error_audio_error" +/// "error_client" +/// "error_permission" +/// "error_network" +/// "error_network_timeout" +/// "error_no_match" +/// "error_busy" +/// "error_server" +/// "error_speech_timeout" +/// See the [onError] argument on the [initialize] method for use. +typedef SpeechErrorListener = void Function( + SpeechRecognitionError errorNotification); + +/// Notified when recognition status changes. +/// +/// See the [onStatus] argument on the [initialize] method for use. +typedef SpeechStatusListener = void Function(String status); + +/// Notified when the sound level changes during a listen method. +/// +/// [level] is a measure of the decibels of the current sound on +/// the recognition input. See the [onSoundLevelChange] argument on +/// the [listen] method for use. +typedef SpeechSoundLevelChange = Function(double level); + +/// An interface to device specific speech recognition services. +/// +/// The general flow of a speech recognition session is as follows: +/// ```Dart +/// SpeechToText speech = SpeechToText(); +/// bool isReady = await speech.initialize(); +/// if ( isReady ) { +/// await speech.listen( resultListener: resultListener ); +/// } +/// ... 
+/// // At some point later +/// speech.stop(); +/// ``` +class SpeechToText { + static const String listenMethod = 'listen'; + static const String textRecognitionMethod = 'textRecognition'; + static const String notifyErrorMethod = 'notifyError'; + static const String notifyStatusMethod = 'notifyStatus'; + static const String soundLevelChangeMethod = "soundLevelChange"; + static const String notListeningStatus = "notListening"; + static const String listeningStatus = "listening"; + + static const MethodChannel speechChannel = + const MethodChannel('plugin.csdcorp.com/speech_to_text'); + static final SpeechToText _instance = + SpeechToText.withMethodChannel(speechChannel); + bool _initWorked = false; + bool _recognized = false; + bool _listening = false; + bool _cancelOnError = false; + bool _partialResults = false; + int _listenStartedAt = 0; + int _lastSpeechEventAt = 0; + Duration _pauseFor; + Duration _listenFor; + + /// True if not listening or the user called cancel / stop, false + /// if cancel/stop were invoked by timeout or error condition. + bool _userEnded = false; + String _lastRecognized = ""; + String _lastStatus = ""; + double _lastSoundLevel = 0; + Timer _listenTimer; + LocaleName _systemLocale; + SpeechRecognitionError _lastError; + SpeechResultListener _resultListener; + SpeechErrorListener errorListener; + SpeechStatusListener statusListener; + SpeechSoundLevelChange _soundLevelChange; + + final MethodChannel channel; + factory SpeechToText() => _instance; + + @visibleForTesting + SpeechToText.withMethodChannel(this.channel); + + /// True if words have been recognized during the current [listen] call. + /// + /// Goes false as soon as [cancel] is called. + bool get hasRecognized => _recognized; + + /// The last set of recognized words received. + /// + /// This is maintained across [cancel] calls but cleared on the next + /// [listen]. 
+ String get lastRecognizedWords => _lastRecognized; + + /// The last status update received, see [initialize] to register + /// an optional listener to be notified when this changes. + String get lastStatus => _lastStatus; + + /// The last sound level received during a listen event. + /// + /// The sound level is a measure of how loud the current + /// input is during listening. Use the [onSoundLevelChange] + /// argument in the [listen] method to get notified of + /// changes. + double get lastSoundLevel => _lastSoundLevel; + + /// True if [initialize] succeeded + bool get isAvailable => _initWorked; + + /// True if [listen] succeeded and [stop] or [cancel] has not been called. + /// + /// Also goes false when listening times out if listenFor was set. + bool get isListening => _listening; + bool get isNotListening => !isListening; + + /// The last error received or null if none, see [initialize] to + /// register an optional listener to be notified of errors. + SpeechRecognitionError get lastError => _lastError; + + /// True if an error has been received, see [lastError] for details + bool get hasError => null != lastError; + + /// Returns true if the user has already granted permission to access the + /// microphone, does not prompt the user. + /// + /// This method can be called before [initialize] to check if permission + /// has already been granted. If this returns false then the [initialize] + /// call will prompt the user for permission if it is allowed to do so. + /// Note that applications cannot ask for permission again if the user has + /// denied them permission in the past. + Future get hasPermission async { + bool hasPermission = await channel.invokeMethod('has_permission'); + return hasPermission; + } + + /// Initialize speech recognition services, returns true if + /// successful, false if failed. + /// + /// This method must be called before any other speech functions. 
+ /// If this method returns false no further [SpeechToText] methods + /// should be used. Should only be called once if successful but does protect + /// itself if called repeatedly. False usually means that the user has denied + /// permission to use speech. The usual option in that case is to give them + /// instructions on how to open system settings and grant permission. + /// + /// [onError] is an optional listener for errors like + /// timeout, or failure of the device speech recognition. + /// [onStatus] is an optional listener for status changes from + /// listening to not listening. + /// [debugLogging] controls whether there is detailed logging from the underlying + /// plugins. It is off by default, usually only useful for troubleshooting issues + /// with a particular OS version or device, fairly verbose + Future initialize( + {SpeechErrorListener onError, + SpeechStatusListener onStatus, + debugLogging = false}) async { + if (_initWorked) { + return Future.value(_initWorked); + } + errorListener = onError; + statusListener = onStatus; + channel.setMethodCallHandler(_handleCallbacks); + _initWorked = await channel + .invokeMethod('initialize', {"debugLogging": debugLogging}); + return _initWorked; + } + + /// Stops the current listen for speech if active, does nothing if not. + /// + /// Stopping a listen session will cause a final result to be sent. Each + /// listen session should be ended with either [stop] or [cancel], for + /// example in the dispose method of a Widget. [cancel] is automatically + /// invoked by a permanent error if [cancelOnError] is set to true in the + /// [listen] call. + /// + /// *Note:* Cannot be used until a successful [initialize] call. Should + /// only be used after a successful [listen] call. 
+ Future stop() async { + _userEnded = true; + return _stop(); + } + + Future _stop() async { + if (!_initWorked) { + return; + } + _shutdownListener(); + await channel.invokeMethod('stop'); + } + + /// Cancels the current listen for speech if active, does nothing if not. + /// + /// Canceling means that there will be no final result returned from the + /// recognizer. Each listen session should be ended with either [stop] or + /// [cancel], for example in the dispose method of a Widget. [cancel] is + /// automatically invoked by a permanent error if [cancelOnError] is set + /// to true in the [listen] call. + /// + /// *Note* Cannot be used until a successful [initialize] call. Should only + /// be used after a successful [listen] call. + Future cancel() async { + _userEnded = true; + return _cancel(); + } + + Future _cancel() async { + if (!_initWorked) { + return; + } + _shutdownListener(); + await channel.invokeMethod('cancel'); + } + + /// Starts a listening session for speech and converts it to text, + /// invoking the provided [onResult] method as words are recognized. + /// + /// Cannot be used until a successful [initialize] call. There is a + /// time limit on listening imposed by both Android and iOS. The time + /// depends on the device, network, etc. Android is usually quite short, + /// especially if there is no active speech event detected, on the order + /// of ten seconds or so. + /// + /// When listening is done always invoke either [cancel] or [stop] to + /// end the session, even if it times out. [cancelOnError] provides an + /// automatic way to ensure this happens. + /// + /// [onResult] is an optional listener that is notified when words + /// are recognized. + /// + /// [listenFor] sets the maximum duration that it will listen for, after + /// that it automatically stops the listen for you. + /// + /// [pauseFor] sets the maximum duration of a pause in speech with no words + /// detected, after that it automatically stops the listen for you. 
+ /// + /// [localeId] is an optional locale that can be used to listen in a language + other than the current system default. See [locales] to find the list of + supported languages for listening. + /// + /// [onSoundLevelChange] is an optional listener that is notified when the + sound level of the input changes. Use this to update the UI in response to + more or less input. The values currently differ between Android and iOS, + haven't yet been able to determine from the Android documentation what the + value means. On iOS the value returned is in decibels. + /// + /// [cancelOnError] if true then listening is automatically canceled on a + permanent error. This defaults to false. When false cancel should be + called from the error handler. + /// + /// [partialResults] if true the listen reports results as they are recognized, + when false only final results are reported. Defaults to true. + /// + /// [onDevice] if true the listen attempts to recognize locally with speech never + leaving the device. If it cannot do this the listen attempt will fail. This is + usually only needed for sensitive content where privacy or security is a concern. 
+ Future listen( + {SpeechResultListener onResult, + Duration listenFor, + Duration pauseFor, + String localeId, + SpeechSoundLevelChange onSoundLevelChange, + cancelOnError = false, + partialResults = true, + onDevice = false, + ListenMode listenMode = ListenMode.confirmation}) async { + if (!_initWorked) { + throw SpeechToTextNotInitializedException(); + } + _userEnded = false; + _cancelOnError = cancelOnError; + _recognized = false; + _resultListener = onResult; + _soundLevelChange = onSoundLevelChange; + _partialResults = partialResults; + Map listenParams = { + "partialResults": partialResults || null != pauseFor, + "onDevice": onDevice, + "listenMode": listenMode.index, + }; + if (null != localeId) { + listenParams["localeId"] = localeId; + } + try { + bool started = await channel.invokeMethod(listenMethod, listenParams); + if (started) { + _listenStartedAt = clock.now().millisecondsSinceEpoch; + _setupListenAndPause(pauseFor, listenFor); + } + } on PlatformException catch (e) { + throw ListenFailedException(e.details); + } + } + + void _setupListenAndPause(Duration pauseFor, Duration listenFor) { + _pauseFor = null; + _listenFor = null; + if (null == pauseFor && null == listenFor) { + return; + } + var minDuration; + if (null == pauseFor) { + _listenFor = Duration(milliseconds: listenFor.inMilliseconds); + minDuration = listenFor; + } else if (null == listenFor) { + _pauseFor = Duration(milliseconds: pauseFor.inMilliseconds); + minDuration = pauseFor; + } else { + _listenFor = Duration(milliseconds: listenFor.inMilliseconds); + _pauseFor = Duration(milliseconds: pauseFor.inMilliseconds); + var minMillis = min(listenFor.inMilliseconds - _elapsedListenMillis, + pauseFor.inMilliseconds); + minDuration = Duration(milliseconds: minMillis); + } + _listenTimer = Timer(minDuration, _stopOnPauseOrListen); + } + + int get _elapsedListenMillis => + clock.now().millisecondsSinceEpoch - _listenStartedAt; + int get _elapsedSinceSpeechEvent => + 
clock.now().millisecondsSinceEpoch - _lastSpeechEventAt; + + void _stopOnPauseOrListen() { + if (null != _listenFor && + _elapsedListenMillis >= _listenFor.inMilliseconds) { + _stop(); + } else if (null != _pauseFor && + _elapsedSinceSpeechEvent >= _pauseFor.inMilliseconds) { + _stop(); + } else { + _setupListenAndPause(_pauseFor, _listenFor); + } + } + + /// returns the list of speech locales available on the device. + /// + /// This method is useful to find the identifier to use + /// for the [listen] method, it is the [localeId] member of the + /// [LocaleName]. + /// + /// Each [LocaleName] in the returned list has the + /// identifier for the locale as well as a name for + /// display. The name is localized for the system locale on + /// the device. + Future> locales() async { + if (!_initWorked) { + throw SpeechToTextNotInitializedException(); + } + final List locales = await channel.invokeMethod('locales'); + List filteredLocales = locales + .map((locale) { + var components = locale.split(":"); + if (components.length != 2) { + return null; + } + return LocaleName(components[0], components[1]); + }) + .where((item) => item != null) + .toList(); + if (filteredLocales.isNotEmpty) { + _systemLocale = filteredLocales.first; + } else { + _systemLocale = null; + } + filteredLocales.sort((ln1, ln2) => ln1.name.compareTo(ln2.name)); + return filteredLocales; + } + + /// returns the locale that will be used if no localeId is passed + /// to the [listen] method. 
+ Future systemLocale() async { + if (null == _systemLocale) { + await locales(); + } + return Future.value(_systemLocale); + } + + Future _handleCallbacks(MethodCall call) async { + // print("SpeechToText call: ${call.method} ${call.arguments}"); + switch (call.method) { + case textRecognitionMethod: + if (call.arguments is String) { + _onTextRecognition(call.arguments); + } + break; + case notifyErrorMethod: + if (call.arguments is String) { + await _onNotifyError(call.arguments); + } + break; + case notifyStatusMethod: + if (call.arguments is String) { + _onNotifyStatus(call.arguments); + } + break; + case soundLevelChangeMethod: + if (call.arguments is double) { + _onSoundLevelChange(call.arguments); + } + break; + default: + } + } + + void _onTextRecognition(String resultJson) { + _lastSpeechEventAt = clock.now().millisecondsSinceEpoch; + Map resultMap = jsonDecode(resultJson); + SpeechRecognitionResult speechResult = + SpeechRecognitionResult.fromJson(resultMap); + if (!_partialResults && !speechResult.finalResult) { + return; + } + _recognized = true; + // print("Recognized text $resultJson"); + + _lastRecognized = speechResult.recognizedWords; + if (null != _resultListener) { + _resultListener(speechResult); + } + } + + Future _onNotifyError(String errorJson) async { + if (isNotListening && _userEnded) { + return; + } + Map errorMap = jsonDecode(errorJson); + SpeechRecognitionError speechError = + SpeechRecognitionError.fromJson(errorMap); + _lastError = speechError; + if (null != errorListener) { + errorListener(speechError); + } + if (_cancelOnError && speechError.permanent) { + await _cancel(); + } + } + + void _onNotifyStatus(String status) { + _lastStatus = status; + _listening = status == listeningStatus; + // print(status); + if (null != statusListener) { + statusListener(status); + } + } + + void _onSoundLevelChange(double level) { + if (isNotListening) { + return; + } + _lastSoundLevel = level; + if (null != _soundLevelChange) { + 
_soundLevelChange(level); + } + } + + _shutdownListener() { + _listening = false; + _recognized = false; + _listenTimer?.cancel(); + _listenTimer = null; + } + + @visibleForTesting + Future processMethodCall(MethodCall call) async { + return await _handleCallbacks(call); + } +} + +/// A single locale with a [name], localized to the current system locale, +/// and a [localeId] which can be used in the [listen] method to choose a +/// locale for speech recognition. +class LocaleName { + final String localeId; + final String name; + LocaleName(this.localeId, this.name); +} + +/// Thrown when a method is called that requires successful +/// initialization first. +class SpeechToTextNotInitializedException implements Exception {} + +/// Thrown when listen fails to properly start a speech listening session +/// on the device +class ListenFailedException implements Exception { + final String details; + ListenFailedException(this.details); +} diff --git a/speech_to_text/lib/speech_to_text_provider.dart b/speech_to_text/lib/speech_to_text_provider.dart new file mode 100644 index 00000000..2093db44 --- /dev/null +++ b/speech_to_text/lib/speech_to_text_provider.dart @@ -0,0 +1,200 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_event.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; + +/// Simplifies interaction with [SpeechToText] by handling all the callbacks and notifying +/// listeners as events happen. 
+/// +/// Here's an example of using the [SpeechToTextProvider] +/// ``` +/// var speechProvider = SpeechToTextProvider( SpeechToText()); +/// var available = await speechProvider.initialize(); +/// StreamSubscription _subscription; +/// _subscription = speechProvider.recognitionController.stream.listen((recognitionEvent) { +/// if (recognitionEvent.eventType == SpeechRecognitionEventType.finalRecognitionEvent ) { +/// print("I heard: ${recognitionEvent.recognitionResult.recognizedWords}"); +/// } +/// }); +/// speechProvider.addListener(() { +/// var words = speechProvider.lastWords; +/// }); +class SpeechToTextProvider extends ChangeNotifier { + final StreamController _recognitionController = + StreamController.broadcast(); + final SpeechToText _speechToText; + SpeechRecognitionResult _lastResult; + double _lastLevel = 0; + List _locales = []; + LocaleName _systemLocale; + + /// Only construct one instance in an application. + /// + /// Do not call `initialize` on the [SpeechToText] that is passed as a parameter, instead + call the [initialize] method on this class. + SpeechToTextProvider(this._speechToText); + + Stream get stream => _recognitionController.stream; + + /// Returns the last result received, may be null. + SpeechRecognitionResult get lastResult => _lastResult; + + /// Returns the last error received, may be null. + SpeechRecognitionError get lastError => _speechToText.lastError; + + /// Returns the last sound level received. + /// + /// Note this is only available when the `soundLevel` is set to true on + a call to [listen], will be 0 at all other times. + double get lastLevel => _lastLevel; + + /// Initializes the provider and the contained [SpeechToText] instance. + /// + /// Returns true if [SpeechToText] was initialized successfully and can now + be used, false otherwise. 
+ Future initialize() async { + if (isAvailable) { + return isAvailable; + } + bool availableBefore = _speechToText.isAvailable; + bool available = + await _speechToText.initialize(onStatus: _onStatus, onError: _onError); + if (available) { + _locales = []; + _locales.addAll(await _speechToText.locales()); + _systemLocale = await _speechToText.systemLocale(); + } + if (availableBefore != available) { + notifyListeners(); + } + return available; + } + + /// Returns true if the provider has been initialized and can be used to recognize speech. + bool get isAvailable => _speechToText.isAvailable; + + /// Returns true if the provider cannot be used to recognize speech, either because it has not + /// yet been initialized or because initialization failed. + bool get isNotAvailable => !_speechToText.isAvailable; + + /// Returns true if [SpeechToText] is listening for new speech. + bool get isListening => _speechToText.isListening; + + /// Returns true if [SpeechToText] is not listening for new speech. + bool get isNotListening => _speechToText.isNotListening; + + /// Returns true if [SpeechToText] has a previous error. + bool get hasError => _speechToText.hasError; + + /// Returns true if [lastResult] has a last result. + bool get hasResults => null != _lastResult; + + /// Returns the list of locales that are available on the device for speech recognition. + List get locales => _locales; + + /// Returns the locale that is currently set as active on the device. + LocaleName get systemLocale => _systemLocale; + + /// Start listening for new events, set [partialResults] to true to receive interim + /// recognition results. + /// + /// [soundLevel] set to true to be notified on changes to the input sound level + /// on the microphone. + /// + /// [listenFor] sets the maximum duration that it will listen for, after + /// that it automatically stops the listen for you. 
+ /// + /// [pauseFor] sets the maximum duration of a pause in speech with no words + /// detected, after that it automatically stops the listen for you. + /// + /// Call this only after a successful [initialize] call + void listen( + {bool partialResults = false, + bool soundLevel = false, + Duration listenFor, + Duration pauseFor}) { + _lastLevel = 0; + _lastResult = null; + if (soundLevel) { + _speechToText.listen( + partialResults: partialResults, + listenFor: listenFor, + pauseFor: pauseFor, + cancelOnError: true, + onResult: _onListenResult, + onSoundLevelChange: _onSoundLevelChange); + } else { + _speechToText.listen( + partialResults: partialResults, + listenFor: listenFor, + pauseFor: pauseFor, + cancelOnError: true, + onResult: _onListenResult); + } + } + + /// Stops a current active listening session. + /// + /// Call this after calling [listen] to stop the recognizer from listening further + /// and return the current result as final. + void stop() { + _speechToText.stop(); + notifyListeners(); + } + + /// Cancel a current active listening session. + /// + /// Call this after calling [listen] to stop the recognizer from listening further + /// and ignore any results recognized so far. + void cancel() { + _speechToText.cancel(); + notifyListeners(); + } + + void _onError(SpeechRecognitionError errorNotification) { + _recognitionController.add(SpeechRecognitionEvent( + SpeechRecognitionEventType.errorEvent, + null, + errorNotification, + isListening, + null)); + notifyListeners(); + } + + void _onStatus(String status) { + _recognitionController.add(SpeechRecognitionEvent( + SpeechRecognitionEventType.statusChangeEvent, + null, + null, + isListening, + null)); + notifyListeners(); + } + + void _onListenResult(SpeechRecognitionResult result) { + _lastResult = result; + _recognitionController.add(SpeechRecognitionEvent( + result.finalResult + ? 
SpeechRecognitionEventType.finalRecognitionEvent + : SpeechRecognitionEventType.partialRecognitionEvent, + result, + null, + isListening, + null)); + notifyListeners(); + } + + void _onSoundLevelChange(double level) { + _lastLevel = level; + _recognitionController.add(SpeechRecognitionEvent( + SpeechRecognitionEventType.soundLevelChangeEvent, + null, + null, + null, + level)); + notifyListeners(); + } +} diff --git a/speech_to_text/pubspec.lock b/speech_to_text/pubspec.lock new file mode 100644 index 00000000..7877604f --- /dev/null +++ b/speech_to_text/pubspec.lock @@ -0,0 +1,483 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + _fe_analyzer_shared: + dependency: transitive + description: + name: _fe_analyzer_shared + url: "https://pub.dartlang.org" + source: hosted + version: "5.0.0" + analyzer: + dependency: transitive + description: + name: analyzer + url: "https://pub.dartlang.org" + source: hosted + version: "0.39.13" + archive: + dependency: transitive + description: + name: archive + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.13" + args: + dependency: transitive + description: + name: args + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.0" + async: + dependency: transitive + description: + name: async + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.1" + boolean_selector: + dependency: transitive + description: + name: boolean_selector + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + build: + dependency: transitive + description: + name: build + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.0" + build_config: + dependency: transitive + description: + name: build_config + url: "https://pub.dartlang.org" + source: hosted + version: "0.4.2" + build_daemon: + dependency: transitive + description: + name: build_daemon + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.4" + build_resolvers: + dependency: 
transitive + description: + name: build_resolvers + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.10" + build_runner: + dependency: "direct dev" + description: + name: build_runner + url: "https://pub.dartlang.org" + source: hosted + version: "1.10.0" + build_runner_core: + dependency: transitive + description: + name: build_runner_core + url: "https://pub.dartlang.org" + source: hosted + version: "5.2.0" + built_collection: + dependency: transitive + description: + name: built_collection + url: "https://pub.dartlang.org" + source: hosted + version: "4.3.2" + built_value: + dependency: transitive + description: + name: built_value + url: "https://pub.dartlang.org" + source: hosted + version: "7.1.0" + charcode: + dependency: transitive + description: + name: charcode + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.3" + checked_yaml: + dependency: transitive + description: + name: checked_yaml + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.2" + clock: + dependency: "direct main" + description: + name: clock + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.1" + code_builder: + dependency: transitive + description: + name: code_builder + url: "https://pub.dartlang.org" + source: hosted + version: "3.4.0" + collection: + dependency: transitive + description: + name: collection + url: "https://pub.dartlang.org" + source: hosted + version: "1.14.12" + convert: + dependency: transitive + description: + name: convert + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.1" + crypto: + dependency: transitive + description: + name: crypto + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.4" + csslib: + dependency: transitive + description: + name: csslib + url: "https://pub.dartlang.org" + source: hosted + version: "0.16.1" + dart_style: + dependency: transitive + description: + name: dart_style + url: "https://pub.dartlang.org" + source: hosted + version: "1.3.6" + 
fake_async: + dependency: "direct dev" + description: + name: fake_async + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + fixnum: + dependency: transitive + description: + name: fixnum + url: "https://pub.dartlang.org" + source: hosted + version: "0.10.11" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_test: + dependency: "direct dev" + description: flutter + source: sdk + version: "0.0.0" + glob: + dependency: transitive + description: + name: glob + url: "https://pub.dartlang.org" + source: hosted + version: "1.2.0" + graphs: + dependency: transitive + description: + name: graphs + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.0" + html: + dependency: transitive + description: + name: html + url: "https://pub.dartlang.org" + source: hosted + version: "0.14.0+3" + http_multi_server: + dependency: transitive + description: + name: http_multi_server + url: "https://pub.dartlang.org" + source: hosted + version: "2.2.0" + http_parser: + dependency: transitive + description: + name: http_parser + url: "https://pub.dartlang.org" + source: hosted + version: "3.1.4" + image: + dependency: transitive + description: + name: image + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.12" + io: + dependency: transitive + description: + name: io + url: "https://pub.dartlang.org" + source: hosted + version: "0.3.4" + js: + dependency: transitive + description: + name: js + url: "https://pub.dartlang.org" + source: hosted + version: "0.6.2" + json_annotation: + dependency: "direct main" + description: + name: json_annotation + url: "https://pub.dartlang.org" + source: hosted + version: "3.0.1" + json_serializable: + dependency: "direct dev" + description: + name: json_serializable + url: "https://pub.dartlang.org" + source: hosted + version: "3.3.0" + logging: + dependency: transitive + description: + name: logging + url: "https://pub.dartlang.org" + source: hosted + 
version: "0.11.4" + matcher: + dependency: transitive + description: + name: matcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.12.6" + meta: + dependency: transitive + description: + name: meta + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.8" + mime: + dependency: transitive + description: + name: mime + url: "https://pub.dartlang.org" + source: hosted + version: "0.9.6+3" + node_interop: + dependency: transitive + description: + name: node_interop + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.1" + node_io: + dependency: transitive + description: + name: node_io + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.1" + package_config: + dependency: transitive + description: + name: package_config + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.3" + path: + dependency: transitive + description: + name: path + url: "https://pub.dartlang.org" + source: hosted + version: "1.6.4" + pedantic: + dependency: transitive + description: + name: pedantic + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.0" + petitparser: + dependency: transitive + description: + name: petitparser + url: "https://pub.dartlang.org" + source: hosted + version: "2.4.0" + pool: + dependency: transitive + description: + name: pool + url: "https://pub.dartlang.org" + source: hosted + version: "1.4.0" + pub_semver: + dependency: transitive + description: + name: pub_semver + url: "https://pub.dartlang.org" + source: hosted + version: "1.4.4" + pubspec_parse: + dependency: transitive + description: + name: pubspec_parse + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.5" + quiver: + dependency: transitive + description: + name: quiver + url: "https://pub.dartlang.org" + source: hosted + version: "2.1.3" + shelf: + dependency: transitive + description: + name: shelf + url: "https://pub.dartlang.org" + source: hosted + version: "0.7.7" + shelf_web_socket: + dependency: 
transitive + description: + name: shelf_web_socket + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.3" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_gen: + dependency: transitive + description: + name: source_gen + url: "https://pub.dartlang.org" + source: hosted + version: "0.9.6" + source_span: + dependency: transitive + description: + name: source_span + url: "https://pub.dartlang.org" + source: hosted + version: "1.7.0" + stack_trace: + dependency: transitive + description: + name: stack_trace + url: "https://pub.dartlang.org" + source: hosted + version: "1.9.3" + stream_channel: + dependency: transitive + description: + name: stream_channel + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.0" + stream_transform: + dependency: transitive + description: + name: stream_transform + url: "https://pub.dartlang.org" + source: hosted + version: "1.2.0" + string_scanner: + dependency: transitive + description: + name: string_scanner + url: "https://pub.dartlang.org" + source: hosted + version: "1.0.5" + term_glyph: + dependency: transitive + description: + name: term_glyph + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + test_api: + dependency: transitive + description: + name: test_api + url: "https://pub.dartlang.org" + source: hosted + version: "0.2.15" + timing: + dependency: transitive + description: + name: timing + url: "https://pub.dartlang.org" + source: hosted + version: "0.1.1+2" + typed_data: + dependency: transitive + description: + name: typed_data + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.6" + vector_math: + dependency: transitive + description: + name: vector_math + url: "https://pub.dartlang.org" + source: hosted + version: "2.0.8" + watcher: + dependency: transitive + description: + name: watcher + url: "https://pub.dartlang.org" + source: hosted + version: "0.9.7+15" + web_socket_channel: + dependency: 
transitive + description: + name: web_socket_channel + url: "https://pub.dartlang.org" + source: hosted + version: "1.1.0" + xml: + dependency: transitive + description: + name: xml + url: "https://pub.dartlang.org" + source: hosted + version: "3.6.1" + yaml: + dependency: transitive + description: + name: yaml + url: "https://pub.dartlang.org" + source: hosted + version: "2.2.1" +sdks: + dart: ">=2.7.0 <3.0.0" + flutter: ">=1.10.0" diff --git a/speech_to_text/pubspec.yaml b/speech_to_text/pubspec.yaml new file mode 100644 index 00000000..34b3da29 --- /dev/null +++ b/speech_to_text/pubspec.yaml @@ -0,0 +1,31 @@ +name: speech_to_text +description: A Flutter plugin that exposes device specific speech to text recognition capability. + + + +environment: + sdk: ">=2.1.0 <3.0.0" + flutter: ">=1.10.0" + +dependencies: + flutter: + sdk: flutter + json_annotation: ^3.0.0 + clock: ^1.0.1 + +dev_dependencies: + flutter_test: + sdk: flutter + build_runner: ^1.0.0 + json_serializable: ^3.0.0 + fake_async: ^1.0.1 + +flutter: + plugin: + platforms: + android: + package: com.csdcorp.speech_to_text + pluginClass: SpeechToTextPlugin + ios: + pluginClass: SpeechToTextPlugin + diff --git a/speech_to_text/test/speech_recognition_error_test.dart b/speech_to_text/test/speech_recognition_error_test.dart new file mode 100644 index 00000000..202ae4cd --- /dev/null +++ b/speech_to_text/test/speech_recognition_error_test.dart @@ -0,0 +1,65 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; + +void main() { + const String msg1 = "msg1"; + + setUp(() {}); + + group('properties', () { + test('equals true for same object', () { + SpeechRecognitionError error = SpeechRecognitionError(msg1, false); + expect(error, error); + }); + test('equals true for different object same values', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = 
SpeechRecognitionError(msg1, false); + expect(error1, error2); + }); + test('equals false for different object', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError("msg2", false); + expect(error1, isNot(error2)); + }); + test('hash same for same object', () { + SpeechRecognitionError error = SpeechRecognitionError(msg1, false); + expect(error.hashCode, error.hashCode); + }); + test('hash same for different object same values', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError(msg1, false); + expect(error1.hashCode, error2.hashCode); + }); + test('hash different for different object', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError("msg2", false); + expect(error1.hashCode, isNot(error2.hashCode)); + }); + test('toString as expected', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + expect(error1.toString(), + "SpeechRecognitionError msg: $msg1, permanent: false"); + }); + }); + group('json', () { + test('loads properly', () { + var json = jsonDecode('{"errorMsg":"$msg1","permanent":true}'); + SpeechRecognitionError error = SpeechRecognitionError.fromJson(json); + expect(error.errorMsg, msg1); + expect(error.permanent, isTrue); + json = jsonDecode('{"errorMsg":"$msg1","permanent":false}'); + error = SpeechRecognitionError.fromJson(json); + expect(error.permanent, isFalse); + }); + test('roundtrips properly', () { + var json = jsonDecode('{"errorMsg":"$msg1","permanent":true}'); + SpeechRecognitionError error = SpeechRecognitionError.fromJson(json); + var roundtripJson = error.toJson(); + SpeechRecognitionError roundtripError = + SpeechRecognitionError.fromJson(roundtripJson); + expect(error, roundtripError); + }); + }); +} diff --git a/speech_to_text/test/speech_recognition_event_test.dart 
b/speech_to_text/test/speech_recognition_event_test.dart new file mode 100644 index 00000000..ceaaab8a --- /dev/null +++ b/speech_to_text/test/speech_recognition_event_test.dart @@ -0,0 +1,42 @@ +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_event.dart'; + +import 'test_speech_channel_handler.dart'; + +void main() { + group('properties', () { + test('status listening matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.statusChangeEvent, null, null, true, null); + expect(event.eventType, SpeechRecognitionEventType.statusChangeEvent); + expect(event.isListening, isTrue); + }); + test('result matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.finalRecognitionEvent, + TestSpeechChannelHandler.firstRecognizedResult, + null, + null, + null); + expect(event.eventType, SpeechRecognitionEventType.finalRecognitionEvent); + expect(event.recognitionResult, + TestSpeechChannelHandler.firstRecognizedResult); + }); + test('error matches', () { + var event = SpeechRecognitionEvent(SpeechRecognitionEventType.errorEvent, + null, TestSpeechChannelHandler.firstError, null, null); + expect(event.eventType, SpeechRecognitionEventType.errorEvent); + expect(event.error, TestSpeechChannelHandler.firstError); + }); + test('sound level matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.soundLevelChangeEvent, + null, + null, + null, + TestSpeechChannelHandler.level1); + expect(event.eventType, SpeechRecognitionEventType.soundLevelChangeEvent); + expect(event.level, TestSpeechChannelHandler.level1); + }); + }); +} diff --git a/speech_to_text/test/speech_recognition_result_test.dart b/speech_to_text/test/speech_recognition_result_test.dart new file mode 100644 index 00000000..1516779a --- /dev/null +++ b/speech_to_text/test/speech_recognition_result_test.dart @@ -0,0 +1,134 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; 
+import 'package:speech_to_text/speech_recognition_result.dart'; + +void main() { + final String firstRecognizedWords = 'hello'; + final String secondRecognizedWords = 'hello there'; + final double firstConfidence = 0.85; + final double secondConfidence = 0.62; + final String firstRecognizedJson = + '{"alternates":[{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}],"finalResult":false}'; + final String secondRecognizedJson = + '{"alternates":[{"recognizedWords":"$secondRecognizedWords","confidence":$secondConfidence}],"finalResult":false}'; + final SpeechRecognitionWords firstWords = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + final SpeechRecognitionWords secondWords = + SpeechRecognitionWords(secondRecognizedWords, secondConfidence); + + setUp(() {}); + + group('recognizedWords', () { + test('empty if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.recognizedWords, isEmpty); + }); + test('matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.recognizedWords, firstRecognizedWords); + }); + }); + group('alternates', () { + test('empty if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.alternates, isEmpty); + }); + test('expected contents', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.alternates, contains(firstWords)); + expect(result.alternates, contains(secondWords)); + }); + test('in order', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.alternates.first, firstWords); + }); + }); + group('confidence', () { + test('0 if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.confidence, 0); + }); + test('isConfident false if no 
alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.isConfident(), isFalse); + }); + test('isConfident matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.isConfident(), firstWords.isConfident()); + }); + test('hasConfidenceRating false if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.hasConfidenceRating, isFalse); + }); + test('hasConfidenceRating matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.hasConfidenceRating, firstWords.hasConfidenceRating); + }); + }); + group('json', () { + test('loads correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionResult result = SpeechRecognitionResult.fromJson(json); + expect(result.recognizedWords, firstRecognizedWords); + expect(result.confidence, firstConfidence); + }); + test('roundtrips correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionResult result = SpeechRecognitionResult.fromJson(json); + var roundTripJson = result.toJson(); + SpeechRecognitionResult roundtripResult = + SpeechRecognitionResult.fromJson(roundTripJson); + expect(result, roundtripResult); + }); + }); + group('overrides', () { + test('toString works with no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect( + result.toString(), "SpeechRecognitionResult words: [], final: true"); + }); + test('toString works with alternates', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result.toString(), + "SpeechRecognitionResult words: [SpeechRecognitionWords words: hello, confidence: 0.85], final: true"); + }); + test('hash same for same object', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], 
true); + expect(result.hashCode, result.hashCode); + }); + test('hash differs for different objects', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result2 = + SpeechRecognitionResult([secondWords], true); + expect(result1.hashCode, isNot(result2.hashCode)); + }); + test('equals same for same object', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result, result); + }); + test('equals same for different object same values', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result1a = + SpeechRecognitionResult([firstWords], true); + expect(result1, result1a); + }); + test('equals differs for different objects', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result2 = + SpeechRecognitionResult([secondWords], true); + expect(result1, isNot(result2)); + }); + }); +} diff --git a/speech_to_text/test/speech_recognitions_words_test.dart b/speech_to_text/test/speech_recognitions_words_test.dart new file mode 100644 index 00000000..36a9ef0e --- /dev/null +++ b/speech_to_text/test/speech_recognitions_words_test.dart @@ -0,0 +1,86 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +void main() { + final String firstRecognizedWords = 'hello'; + final String secondRecognizedWords = 'hello there'; + final double firstConfidence = 0.85; + final double secondConfidence = 0.62; + final String firstRecognizedJson = + '{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}'; + final SpeechRecognitionWords firstWords = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + final SpeechRecognitionWords secondWords = + SpeechRecognitionWords(secondRecognizedWords, secondConfidence); + + setUp(() {}); + + 
group('properties', () { + test('words', () { + expect(firstWords.recognizedWords, firstRecognizedWords); + expect(secondWords.recognizedWords, secondRecognizedWords); + }); + test('confidence', () { + expect(firstWords.confidence, firstConfidence); + expect(secondWords.confidence, secondConfidence); + expect(firstWords.hasConfidenceRating, isTrue); + }); + test('equals true for same object', () { + expect(firstWords, firstWords); + }); + test('equals true for different object with same values', () { + SpeechRecognitionWords firstWordsA = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + expect(firstWords, firstWordsA); + }); + test('equals false for different results', () { + expect(firstWords, isNot(secondWords)); + }); + test('hash same for same object', () { + expect(firstWords.hashCode, firstWords.hashCode); + }); + test('hash same for different object with same values', () { + SpeechRecognitionWords firstWordsA = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + expect(firstWords.hashCode, firstWordsA.hashCode); + }); + test('hash different for different results', () { + expect(firstWords.hashCode, isNot(secondWords.hashCode)); + }); + }); + group('isConfident', () { + test('true when >= 0.8', () { + expect(firstWords.isConfident(), isTrue); + }); + test('false when < 0.8', () { + expect(secondWords.isConfident(), isFalse); + }); + test('respects threshold', () { + expect(secondWords.isConfident(threshold: 0.5), isTrue); + }); + test('true when missing', () { + SpeechRecognitionWords words = SpeechRecognitionWords( + firstRecognizedWords, SpeechRecognitionWords.missingConfidence); + expect(words.isConfident(), isTrue); + expect(words.hasConfidenceRating, isFalse); + }); + }); + group('json', () { + test('loads correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionWords words = SpeechRecognitionWords.fromJson(json); + expect(words.recognizedWords, firstRecognizedWords); + expect(words.confidence, 
firstConfidence); + }); + test('roundtrips correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionWords words = SpeechRecognitionWords.fromJson(json); + var roundTripJson = words.toJson(); + SpeechRecognitionWords roundtripWords = + SpeechRecognitionWords.fromJson(roundTripJson); + expect(words, roundtripWords); + }); + }); +} diff --git a/speech_to_text/test/speech_to_text_provider_test.dart b/speech_to_text/test/speech_to_text_provider_test.dart new file mode 100644 index 00000000..25366b6f --- /dev/null +++ b/speech_to_text/test/speech_to_text_provider_test.dart @@ -0,0 +1,196 @@ +import 'package:fake_async/fake_async.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_to_text.dart'; +import 'package:speech_to_text/speech_to_text_provider.dart'; + +import 'test_speech_channel_handler.dart'; +import 'test_speech_listener.dart'; + +void main() { + SpeechToTextProvider provider; + SpeechToText speechToText; + TestSpeechChannelHandler speechHandler; + TestSpeechListener speechListener; + + TestWidgetsFlutterBinding.ensureInitialized(); + + setUp(() { + speechToText = SpeechToText.withMethodChannel(SpeechToText.speechChannel); + speechHandler = TestSpeechChannelHandler(speechToText); + speechToText.channel + .setMockMethodCallHandler(speechHandler.methodCallHandler); + provider = SpeechToTextProvider(speechToText); + speechListener = TestSpeechListener(provider); + provider.addListener(speechListener.onNotify); + }); + + tearDown(() { + speechToText.channel.setMockMethodCallHandler(null); + }); + + group('delegates', () { + test('isListening matches delegate defaults', () { + expect(provider.isListening, speechToText.isListening); + expect(provider.isNotListening, speechToText.isNotListening); + }); + test('isAvailable matches delegate defaults', () { + expect(provider.isAvailable, speechToText.isAvailable); + expect(provider.isNotAvailable, !speechToText.isAvailable); + }); + test('isAvailable 
matches delegate after init', () async { + expect(await provider.initialize(), isTrue); + expect(provider.isAvailable, speechToText.isAvailable); + expect(provider.isNotAvailable, !speechToText.isAvailable); + }); + test('hasError matches delegate after error', () async { + expect(await provider.initialize(), isTrue); + expect(provider.hasError, speechToText.hasError); + }); + }); + group('listening', () { + test('notifies on initialize', () async { + fakeAsync((fa) { + provider.initialize(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.isAvailable, isTrue); + }); + }); + test('notifies on listening', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener); + expect(speechListener.notified, isTrue); + expect(speechListener.isListening, isTrue); + expect(provider.hasResults, isFalse); + }); + }); + test('notifies on final words', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener); + speechListener.reset(); + speechHandler.notifyFinalWords(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(provider.hasResults, isTrue); + var result = speechListener.recognitionResult; + expect(result.recognizedWords, + TestSpeechChannelHandler.secondRecognizedWords); + expect(result.finalResult, isTrue); + }); + }); + test('hasResult false after listening before new results', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener); + speechHandler.notifyFinalWords(); + provider.stop(); + setupForListen(provider, fa, speechListener); + fa.flushMicrotasks(); + expect(provider.hasResults, isFalse); + }); + }); + test('notifies on partial words', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener, partialResults: true); + speechListener.reset(); + speechHandler.notifyPartialWords(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(provider.hasResults, isTrue); + var result = 
speechListener.recognitionResult; + expect(result.recognizedWords, + TestSpeechChannelHandler.firstRecognizedWords); + expect(result.finalResult, isFalse); + }); + }); + }); + group('soundLevel', () { + test('notifies when requested', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener, + partialResults: true, soundLevel: true); + speechListener.reset(); + speechHandler.notifySoundLevel(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.soundLevel, TestSpeechChannelHandler.level2); + }); + }); + test('no notification by default', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener, + partialResults: true, soundLevel: false); + speechListener.reset(); + speechHandler.notifySoundLevel(); + fa.flushMicrotasks(); + expect(speechListener.notified, isFalse); + expect(speechListener.soundLevel, 0); + }); + }); + }); + group('stop/cancel', () { + test('notifies on stop', () async { + fakeAsync((fa) { + provider.initialize(); + setupForListen(provider, fa, speechListener); + speechListener.reset(); + provider.stop(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.isListening, isFalse); + }); + }); + test('notifies on cancel', () async { + fakeAsync((fa) { + provider.initialize(); + setupForListen(provider, fa, speechListener); + speechListener.reset(); + provider.cancel(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.isListening, isFalse); + }); + }); + }); + group('error handling', () { + test('hasError matches delegate default', () async { + expect(await provider.initialize(), isTrue); + expect(provider.hasError, speechToText.hasError); + }); + test('notifies on error', () async { + fakeAsync((fa) { + provider.initialize(); + setupForListen(provider, fa, speechListener); + speechListener.reset(); + speechHandler.notifyPermanentError(); + expect(speechListener.notified, isTrue); + 
expect(speechListener.hasError, isTrue); + }); + }); + }); + group('locale', () { + test('locales empty before init', () async { + expect(provider.systemLocale, isNull); + expect(provider.locales, isEmpty); + }); + test('set from SpeechToText after init', () async { + fakeAsync((fa) { + speechHandler.setupLocales(); + provider.initialize(); + fa.flushMicrotasks(); + expect( + provider.systemLocale.localeId, TestSpeechChannelHandler.localeId1); + expect(provider.locales, hasLength(speechHandler.locales.length)); + }); + }); + }); +} + +void setupForListen(SpeechToTextProvider provider, FakeAsync fa, + TestSpeechListener speechListener, + {bool partialResults = false, bool soundLevel = false}) { + provider.initialize(); + fa.flushMicrotasks(); + speechListener.reset(); + provider.listen(partialResults: partialResults, soundLevel: soundLevel); + fa.flushMicrotasks(); +} diff --git a/speech_to_text/test/speech_to_text_test.dart b/speech_to_text/test/speech_to_text_test.dart new file mode 100644 index 00000000..7b4701ff --- /dev/null +++ b/speech_to_text/test/speech_to_text_test.dart @@ -0,0 +1,425 @@ +import 'package:fake_async/fake_async.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; + +import 'test_speech_channel_handler.dart'; + +void main() { + TestWidgetsFlutterBinding.ensureInitialized(); + + TestSpeechListener listener; + TestSpeechChannelHandler speechHandler; + SpeechToText speech; + + setUp(() { + listener = TestSpeechListener(); + speech = SpeechToText.withMethodChannel(SpeechToText.speechChannel); + speechHandler = TestSpeechChannelHandler(speech); + speech.channel.setMockMethodCallHandler(speechHandler.methodCallHandler); + }); + + tearDown(() { + speech.channel.setMockMethodCallHandler(null); + }); + + group('hasPermission', 
() { + test('true if platform reports true', () async { + expect(await speech.hasPermission, true); + }); + test('false if platform reports false', () async { + speechHandler.hasPermissionResult = false; + expect(await speech.hasPermission, false); + }); + }); + group('init', () { + test('succeeds on platform success', () async { + expect(await speech.initialize(), true); + expect(speechHandler.initInvoked, true); + expect(speech.isAvailable, true); + }); + test('only invokes once', () async { + expect(await speech.initialize(), true); + speechHandler.initInvoked = false; + expect(await speech.initialize(), true); + expect(speechHandler.initInvoked, false); + }); + test('fails on platform failure', () async { + speechHandler.initResult = false; + expect(await speech.initialize(), false); + expect(speech.isAvailable, false); + }); + }); + + group('listen', () { + test('fails with exception if not initialized', () async { + try { + await speech.listen(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + // This is a good result + } + }); + test('fails with exception if init fails', () async { + try { + speechHandler.initResult = false; + await speech.initialize(); + await speech.listen(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + // This is a good result + } + }); + test('invokes listen after successful init', () async { + await speech.initialize(); + await speech.listen(); + expect(speechHandler.listenLocale, isNull); + expect(speechHandler.listenInvoked, true); + }); + test('converts platformException to listenFailed', () async { + await speech.initialize(); + speechHandler.listenException = true; + try { + await speech.listen(); + fail("Should have thrown"); + } on ListenFailedException catch (e) { + expect(e.details, TestSpeechChannelHandler.listenExceptionDetails); + } catch (wrongE) { + fail("Should have been ListenFailedException"); + } + }); + test('stops listen after listenFor duration', 
() async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen(listenFor: Duration(seconds: 2)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 2)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after listenFor duration even with speech event', + () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen(listenFor: Duration(seconds: 1)); + speech.processMethodCall(MethodCall(SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after pauseFor duration with no speech', () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen(pauseFor: Duration(seconds: 2)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 2)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after pauseFor with longer listenFor duration', + () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen( + pauseFor: Duration(seconds: 1), listenFor: Duration(seconds: 5)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after listenFor with longer pauseFor duration', + () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen( + listenFor: Duration(seconds: 1), pauseFor: Duration(seconds: 5)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isFalse); + }); + }); + test('keeps listening after pauseFor with speech event', () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + 
speech.listen(pauseFor: Duration(seconds: 2)); + fa.flushMicrotasks(); + fa.elapse(Duration(seconds: 1)); + speech.processMethodCall(MethodCall(SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + fa.flushMicrotasks(); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isTrue); + }); + }); + test('uses localeId if provided', () async { + await speech.initialize(); + await speech.listen(localeId: TestSpeechChannelHandler.localeId1); + expect(speechHandler.listenInvoked, true); + expect(speechHandler.listenLocale, TestSpeechChannelHandler.localeId1); + }); + test('calls speech listener', () async { + await speech.initialize(); + await speech.listen(onResult: listener.onSpeechResult); + await speech.processMethodCall(MethodCall( + SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + expect(listener.speechResults, 1); + expect( + listener.results, [TestSpeechChannelHandler.firstRecognizedResult]); + expect(speech.lastRecognizedWords, + TestSpeechChannelHandler.firstRecognizedWords); + }); + test('calls speech listener with multiple', () async { + await speech.initialize(); + await speech.listen(onResult: listener.onSpeechResult); + await speech.processMethodCall(MethodCall( + SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + await speech.processMethodCall(MethodCall( + SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.secondRecognizedJson)); + expect(listener.speechResults, 2); + expect(listener.results, [ + TestSpeechChannelHandler.firstRecognizedResult, + TestSpeechChannelHandler.secondRecognizedResult + ]); + expect(speech.lastRecognizedWords, + TestSpeechChannelHandler.secondRecognizedWords); + }); + }); + + group('status callback', () { + test('invoked on listen', () async { + await speech.initialize( + onError: listener.onSpeechError, onStatus: listener.onSpeechStatus); + await speech.processMethodCall(MethodCall( + 
SpeechToText.notifyStatusMethod, SpeechToText.listeningStatus)); + expect(listener.speechStatus, 1); + expect(listener.statuses.contains(SpeechToText.listeningStatus), true); + }); + }); + + group('soundLevel callback', () { + test('invoked on listen', () async { + await speech.initialize(); + await speech.listen(onSoundLevelChange: listener.onSoundLevel); + await speech.processMethodCall(MethodCall( + SpeechToText.soundLevelChangeMethod, + TestSpeechChannelHandler.level1)); + expect(listener.soundLevel, 1); + expect(listener.soundLevels, contains(TestSpeechChannelHandler.level1)); + }); + test('sets lastLevel', () async { + await speech.initialize(); + await speech.listen(onSoundLevelChange: listener.onSoundLevel); + await speech.processMethodCall(MethodCall( + SpeechToText.soundLevelChangeMethod, + TestSpeechChannelHandler.level1)); + expect(speech.lastSoundLevel, TestSpeechChannelHandler.level1); + }); + }); + + group('cancel', () { + test('does nothing if not initialized', () async { + speech.cancel(); + expect(speechHandler.cancelInvoked, false); + }); + test('cancels an active listen', () async { + await speech.initialize(); + await speech.listen(); + await speech.cancel(); + expect(speechHandler.cancelInvoked, true); + expect(speech.isListening, isFalse); + }); + }); + group('stop', () { + test('does nothing if not initialized', () async { + speech.stop(); + expect(speechHandler.cancelInvoked, false); + }); + test('stops an active listen', () async { + await speech.initialize(); + speech.listen(); + speech.stop(); + expect(speechHandler.stopInvoked, true); + }); + }); + group('error', () { + test('notifies handler with transient', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.transientErrorJson)); + expect(listener.speechErrors, 1); + expect(listener.errors.first.permanent, isFalse); + }); + test('notifies 
handler with permanent', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(listener.speechErrors, 1); + expect(listener.errors.first.permanent, isTrue); + }); + test('continues listening on transient', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.transientErrorJson)); + expect(speech.isListening, isTrue); + }); + test('continues listening on permanent if cancel not explicitly requested', + () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isTrue); + }); + test('stops listening on permanent if cancel explicitly requested', + () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(cancelOnError: true); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isFalse); + }); + test('Error not sent after cancel', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.cancel(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isFalse); + expect(listener.speechErrors, 0); + }); + test('Error still sent after implicit cancel', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(cancelOnError: true); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + 
TestSpeechChannelHandler.permanentErrorJson)); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isFalse); + expect(listener.speechErrors, 2); + }); + }); + group('locales', () { + test('fails with exception if not initialized', () async { + try { + await speech.locales(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + // This is a good result + } + }); + test('system locale null if not initialized', () async { + LocaleName current; + try { + current = await speech.systemLocale(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + expect(current, isNull); + } + }); + test('handles an empty list', () async { + await speech.initialize(onError: listener.onSpeechError); + List localeNames = await speech.locales(); + expect(speechHandler.localesInvoked, isTrue); + expect(localeNames, isEmpty); + }); + test('returns expected locales', () async { + await speech.initialize(onError: listener.onSpeechError); + speechHandler.locales.add(TestSpeechChannelHandler.locale1); + speechHandler.locales.add(TestSpeechChannelHandler.locale2); + List localeNames = await speech.locales(); + expect(localeNames, hasLength(speechHandler.locales.length)); + expect(localeNames[0].localeId, TestSpeechChannelHandler.localeId1); + expect(localeNames[0].name, TestSpeechChannelHandler.name1); + expect(localeNames[1].localeId, TestSpeechChannelHandler.localeId2); + expect(localeNames[1].name, TestSpeechChannelHandler.name2); + }); + test('skips incorrect locales', () async { + await speech.initialize(onError: listener.onSpeechError); + speechHandler.locales.add("InvalidJunk"); + speechHandler.locales.add(TestSpeechChannelHandler.locale1); + List localeNames = await speech.locales(); + expect(localeNames, hasLength(1)); + expect(localeNames[0].localeId, TestSpeechChannelHandler.localeId1); + expect(localeNames[0].name, 
TestSpeechChannelHandler.name1); + }); + test('system locale matches first returned locale', () async { + await speech.initialize(onError: listener.onSpeechError); + speechHandler.locales.add(TestSpeechChannelHandler.locale1); + speechHandler.locales.add(TestSpeechChannelHandler.locale2); + LocaleName current = await speech.systemLocale(); + expect(current.localeId, TestSpeechChannelHandler.localeId1); + }); + }); + group('status', () { + test('recognized false at start', () async { + expect(speech.hasRecognized, isFalse); + }); + test('listening false at start', () async { + expect(speech.isListening, isFalse); + }); + }); + test('available false at start', () async { + expect(speech.isAvailable, isFalse); + }); + test('hasError false at start', () async { + expect(speech.hasError, isFalse); + }); + test('lastError null at start', () async { + expect(speech.lastError, isNull); + }); + test('status empty at start', () async { + expect(speech.lastStatus, isEmpty); + }); +} + +class TestSpeechListener { + int speechResults = 0; + List results = []; + int speechErrors = 0; + List errors = []; + int speechStatus = 0; + List statuses = []; + int soundLevel = 0; + List soundLevels = []; + + void onSpeechResult(SpeechRecognitionResult result) { + ++speechResults; + results.add(result); + } + + void onSpeechError(SpeechRecognitionError errorResult) { + ++speechErrors; + errors.add(errorResult); + } + + void onSpeechStatus(String status) { + ++speechStatus; + statuses.add(status); + } + + void onSoundLevel(double level) { + ++soundLevel; + soundLevels.add(level); + } +} diff --git a/speech_to_text/test/test_speech_channel_handler.dart b/speech_to_text/test/test_speech_channel_handler.dart new file mode 100644 index 00000000..a55f0670 --- /dev/null +++ b/speech_to_text/test/test_speech_channel_handler.dart @@ -0,0 +1,134 @@ +import 'package:flutter/services.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 
'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; + +/// Holds a set of responses and acts as a mock for the platform specific +/// implementations allowing test cases to determine what the result of +/// a call should be. +class TestSpeechChannelHandler { + final SpeechToText _speech; + + bool listenException = false; + + static const String listenExceptionCode = "listenFailedError"; + static const String listenExceptionMessage = "Failed"; + static const String listenExceptionDetails = "Device Listen Failure"; + + TestSpeechChannelHandler(this._speech); + + bool initResult = true; + bool initInvoked = false; + bool listenInvoked = false; + bool cancelInvoked = false; + bool stopInvoked = false; + bool localesInvoked = false; + bool hasPermissionResult = true; + String listeningStatusResponse = SpeechToText.listeningStatus; + String listenLocale; + List locales = []; + static const String localeId1 = "en_US"; + static const String localeId2 = "fr_CA"; + static const String name1 = "English US"; + static const String name2 = "French Canada"; + static const String locale1 = "$localeId1:$name1"; + static const String locale2 = "$localeId2:$name2"; + static const String firstRecognizedWords = 'hello'; + static const String secondRecognizedWords = 'hello there'; + static const double firstConfidence = 0.85; + static const double secondConfidence = 0.62; + static const String firstRecognizedJson = + '{"alternates":[{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}],"finalResult":false}'; + static const String secondRecognizedJson = + '{"alternates":[{"recognizedWords":"$secondRecognizedWords","confidence":$secondConfidence}],"finalResult":false}'; + static const String finalRecognizedJson = + '{"alternates":[{"recognizedWords":"$secondRecognizedWords","confidence":$secondConfidence}],"finalResult":true}'; + static const SpeechRecognitionWords firstWords = + 
SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + static const SpeechRecognitionWords secondWords = + SpeechRecognitionWords(secondRecognizedWords, secondConfidence); + static final SpeechRecognitionResult firstRecognizedResult = + SpeechRecognitionResult([firstWords], false); + static final SpeechRecognitionResult secondRecognizedResult = + SpeechRecognitionResult([secondWords], false); + static final SpeechRecognitionResult finalRecognizedResult = + SpeechRecognitionResult([secondWords], true); + static const String transientErrorJson = + '{"errorMsg":"network","permanent":false}'; + static const String permanentErrorJson = + '{"errorMsg":"network","permanent":true}'; + static final SpeechRecognitionError firstError = + SpeechRecognitionError("network", true); + static const double level1 = 0.5; + static const double level2 = 10; + + Future methodCallHandler(MethodCall methodCall) async { + switch (methodCall.method) { + case "has_permission": + return hasPermissionResult; + break; + case "initialize": + initInvoked = true; + return initResult; + break; + case "cancel": + cancelInvoked = true; + return true; + break; + case "stop": + stopInvoked = true; + return true; + break; + case SpeechToText.listenMethod: + listenInvoked = true; + if (listenException) { + throw PlatformException( + code: listenExceptionCode, + message: listenExceptionMessage, + details: listenExceptionDetails); + } + listenLocale = methodCall.arguments["localeId"]; + await _speech.processMethodCall(MethodCall( + SpeechToText.notifyStatusMethod, listeningStatusResponse)); + return initResult; + break; + case "locales": + localesInvoked = true; + return locales; + break; + default: + } + return initResult; + } + + void notifyFinalWords() { + _speech.processMethodCall( + MethodCall(SpeechToText.textRecognitionMethod, finalRecognizedJson)); + } + + void notifyPartialWords() { + _speech.processMethodCall( + MethodCall(SpeechToText.textRecognitionMethod, firstRecognizedJson)); + } + 
+ void notifyPermanentError() { + _speech.processMethodCall( + MethodCall(SpeechToText.notifyErrorMethod, permanentErrorJson)); + } + + void notifyTransientError() { + _speech.processMethodCall( + MethodCall(SpeechToText.notifyErrorMethod, transientErrorJson)); + } + + void notifySoundLevel() { + _speech.processMethodCall( + MethodCall(SpeechToText.soundLevelChangeMethod, level2)); + } + + void setupLocales() { + locales.clear(); + locales.add(locale1); + locales.add(locale2); + } +} diff --git a/speech_to_text/test/test_speech_listener.dart b/speech_to_text/test/test_speech_listener.dart new file mode 100644 index 00000000..1efcd81c --- /dev/null +++ b/speech_to_text/test/test_speech_listener.dart @@ -0,0 +1,36 @@ +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text_provider.dart'; + +/// Holds the results of notification by the [SpeechToTextProvider] +class TestSpeechListener { + final SpeechToTextProvider _speechProvider; + + bool isListening = false; + bool isFinal = false; + bool isAvailable = false; + bool notified = false; + bool hasError = false; + SpeechRecognitionResult recognitionResult; + SpeechRecognitionError lastError; + double soundLevel; + + TestSpeechListener(this._speechProvider); + + void reset() { + isListening = false; + isFinal = false; + isAvailable = false; + notified = false; + } + + void onNotify() { + notified = true; + isAvailable = _speechProvider.isAvailable; + isListening = _speechProvider.isListening; + recognitionResult = _speechProvider.lastResult; + hasError = _speechProvider.hasError; + lastError = _speechProvider.lastError; + soundLevel = _speechProvider.lastLevel; + } +}