|
|
|
|
@ -27,7 +27,7 @@ import 'package:permission_handler/permission_handler.dart';
|
|
|
|
|
import 'package:provider/provider.dart';
|
|
|
|
|
|
|
|
|
|
// import 'package:speech_to_text/speech_recognition_error.dart';
|
|
|
|
|
// import 'package:speech_to_text/speech_to_text.dart' as stt;
|
|
|
|
|
import 'package:speech_to_text/speech_to_text.dart' as stt;
|
|
|
|
|
|
|
|
|
|
class SendFeedbackPage extends StatefulWidget {
|
|
|
|
|
final AppoitmentAllHistoryResultList? appointment;
|
|
|
|
|
@ -51,7 +51,7 @@ class _SendFeedbackPageState extends State<SendFeedbackPage> {
|
|
|
|
|
MessageType messageType = MessageType.NON;
|
|
|
|
|
var _currentLocaleId;
|
|
|
|
|
|
|
|
|
|
// stt.SpeechToText speech = stt.SpeechToText();
|
|
|
|
|
stt.SpeechToText speech = stt.SpeechToText();
|
|
|
|
|
var reconizedWord;
|
|
|
|
|
int selectedStatusIndex = 5;
|
|
|
|
|
var event = RobotProvider();
|
|
|
|
|
@ -408,8 +408,8 @@ class _SendFeedbackPageState extends State<SendFeedbackPage> {
|
|
|
|
|
color: Color(0xff575757),
|
|
|
|
|
letterSpacing: -0.56,
|
|
|
|
|
),
|
|
|
|
|
// suffixIconConstraints: BoxConstraints(minWidth: 50),
|
|
|
|
|
// suffixIcon: suffixTap == null ? null : IconButton(icon: Icon(Icons.mic, color: Color(0xff2E303A)), onPressed: suffixTap),
|
|
|
|
|
suffixIconConstraints: BoxConstraints(minWidth: 50),
|
|
|
|
|
suffixIcon: suffixTap == null ? null : IconButton(icon: Icon(Icons.mic, color: Color(0xff2E303A)), onPressed: suffixTap),
|
|
|
|
|
contentPadding: EdgeInsets.zero,
|
|
|
|
|
border: InputBorder.none,
|
|
|
|
|
focusedBorder: InputBorder.none,
|
|
|
|
|
@ -524,38 +524,21 @@ class _SendFeedbackPageState extends State<SendFeedbackPage> {
|
|
|
|
|
/// Opens the speech-recognition dialog and starts listening for dictation.
///
/// Shows the RoboSearch alert dialog, resolves the current language code
/// from the app-wide translation context, then initializes the speech
/// engine and listens for up to 10 seconds. Recognized text is delivered
/// to [resultListener].
// NOTE(review): merge residue removed — a stray closing brace and a full
// duplicated commented-out copy of this function were left behind by an
// earlier diff; this is the single live implementation.
openSpeechReco() async {
  new RoboSearch(context: context).showAlertDialog(context);
  _currentLocaleId = TranslationBase.of(AppGlobal.context).locale.languageCode;

  bool available = await speech.initialize();
  if (available) {
    speech.listen(
      onResult: resultListener,
      // Stop automatically after 10 seconds of listening.
      listenFor: Duration(seconds: 10),
      // Only English and Arabic are supported; anything non-'en' falls
      // back to Arabic (Saudi) — TODO confirm that fallback is intended.
      localeId: _currentLocaleId == 'en' ? 'en-US' : 'ar-SA',
    );
  }
  // NOTE(review): when initialize() fails (permission denied / no engine),
  // the RoboSearch dialog is left open with no feedback — confirm whether
  // it should be dismissed here.
}
|
|
|
|
|
/// Reflects the speech engine's status in the recognized-word hint text.
///
/// [status] is the raw status string emitted by the speech_to_text
/// plugin; anything other than 'listening' is shown as a failure message.
// NOTE(review): this assigns the field directly without setState, so the
// widget does not rebuild here — confirm the UI is refreshed elsewhere
// (e.g. via the RobotProvider event bus).
void statusListener(String status) {
  // Fixed user-visible typo: 'Lisening...' -> 'Listening...'.
  reconizedWord = status == 'listening' ? 'Listening...' : 'Sorry....';
}
|
|
|
|
|
|