no message

dev_v3.13.6_voipcall
Sultan khan 2 years ago
parent 6a74cdd2da
commit f2a57255fc

@ -34,9 +34,11 @@ showReminderDialog(BuildContext context, DateTime dateTime, String doctorName, S
});
}
} else {
if (await Permission.calendarFullAccess.request().isGranted) {
_showReminderDialog(context, dateTime, doctorName, eventId, appoDateFormatted, appoTimeFormatted,
onSuccess: onSuccess, title: title, description: description, onMultiDateSuccess: onMultiDateSuccess ?? (int) {});
if (await Permission.calendarWriteOnly.request().isGranted) {
if (await Permission.calendarFullAccess.request().isGranted) {
_showReminderDialog(context, dateTime, doctorName, eventId, appoDateFormatted, appoTimeFormatted,
onSuccess: onSuccess, title: title, description: description, onMultiDateSuccess: onMultiDateSuccess ?? (int) {});
}
}
}
}

@ -27,7 +27,7 @@ import 'package:permission_handler/permission_handler.dart';
import 'package:provider/provider.dart';
// import 'package:speech_to_text/speech_recognition_error.dart';
// import 'package:speech_to_text/speech_to_text.dart' as stt;
import 'package:speech_to_text/speech_to_text.dart' as stt;
class SendFeedbackPage extends StatefulWidget {
final AppoitmentAllHistoryResultList? appointment;
@ -51,7 +51,7 @@ class _SendFeedbackPageState extends State<SendFeedbackPage> {
MessageType messageType = MessageType.NON;
var _currentLocaleId;
// stt.SpeechToText speech = stt.SpeechToText();
stt.SpeechToText speech = stt.SpeechToText();
var reconizedWord;
int selectedStatusIndex = 5;
var event = RobotProvider();
@ -408,8 +408,8 @@ class _SendFeedbackPageState extends State<SendFeedbackPage> {
color: Color(0xff575757),
letterSpacing: -0.56,
),
// suffixIconConstraints: BoxConstraints(minWidth: 50),
// suffixIcon: suffixTap == null ? null : IconButton(icon: Icon(Icons.mic, color: Color(0xff2E303A)), onPressed: suffixTap),
suffixIconConstraints: BoxConstraints(minWidth: 50),
suffixIcon: suffixTap == null ? null : IconButton(icon: Icon(Icons.mic, color: Color(0xff2E303A)), onPressed: suffixTap),
contentPadding: EdgeInsets.zero,
border: InputBorder.none,
focusedBorder: InputBorder.none,
@ -524,38 +524,21 @@ class _SendFeedbackPageState extends State<SendFeedbackPage> {
/// Opens the speech-recognition dialog and starts listening for dictation.
///
/// Shows the robo-search alert dialog, resolves the app's current locale,
/// initializes the speech engine, and listens for up to 10 seconds.
/// Recognized speech is delivered to [resultListener]. If initialization
/// fails (e.g. the user denied the microphone/speech permission), nothing
/// is started and the denial is logged.
openSpeechReco() async {
  // `new` is redundant in modern Dart.
  RoboSearch(context: context).showAlertDialog(context);
  _currentLocaleId = TranslationBase.of(AppGlobal.context).locale.languageCode;
  bool available = await speech.initialize();
  if (available) {
    speech.listen(
      onResult: resultListener,
      // Stop automatically after 10 seconds of listening.
      listenFor: Duration(seconds: 10),
      // Only English and Arabic are supported; everything non-'en' falls
      // back to Arabic — NOTE(review): confirm this is intended for other
      // locales the app may ship.
      localeId: _currentLocaleId == 'en' ? 'en-US' : 'ar-SA',
    );
  } else {
    // Don't swallow the failure silently — surface it for debugging,
    // matching the message the previous implementation logged.
    print("The user has denied the use of speech recognition.");
  }
}
/// Tracks the speech engine's status changes.
///
/// Sets [reconizedWord] to a user-visible placeholder: "Listening..." while
/// the engine reports the 'listening' status, otherwise "Sorry....".
void statusListener(String status) {
  // Fix user-facing typo: 'Lisening...' -> 'Listening...'.
  reconizedWord = status == 'listening' ? 'Listening...' : 'Sorry....';
}

@ -103,7 +103,7 @@ dependencies:
flutter_svg: ^2.0.8
#Calendar Events
manage_calendar_events: ^2.0.1
manage_calendar_events: ^2.0.2
#InAppBrowser
flutter_inappwebview: ^5.8.0

Loading…
Cancel
Save