import 'package:flutter/material.dart';
import 'package:fluttertoast/fluttertoast.dart';
import 'package:provider/provider.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';
import 'package:test_sa/controllers/providers/settings/setting_provider.dart';
import 'package:test_sa/views/app_style/sizing.dart';
import 'package:test_sa/views/widgets/buttons/app_icon_button2.dart';
import 'package:test_sa/views/widgets/titles/app_sub_title.dart';

/// A dictation control that writes recognized speech into [controller]
/// via the `speech_to_text` plugin.
///
/// Tapping the mic icon starts/stops a recognition session; a text button
/// toggles the recognition locale between "ar" and "en", persisted through
/// [SettingProvider]. A delete button clears the controller when it holds
/// any text.
class SpeechToTextButton extends StatefulWidget {
  /// Receives the recognized words; owned by the caller (not disposed here).
  final TextEditingController controller;

  /// When true, the "Speech To Text" title label is hidden.
  final bool mini;

  const SpeechToTextButton({Key key, this.controller, this.mini = false})
      : super(key: key);

  @override
  SpeechToTextButtonState createState() => SpeechToTextButtonState();
}

// FIX: was `extends State` (raw type => State<StatefulWidget>), which breaks
// `widget.controller` / `widget.mini` resolution. Must be parameterized.
class SpeechToTextButtonState extends State<SpeechToTextButton> {
  bool _speechEnabled = false;
  SettingProvider _settingProvider;
  final SpeechToText _speechToText = SpeechToText();

  // Kept so the listener added in initState can be removed in dispose.
  // FIX: the original registered an anonymous closure and never removed it,
  // leaking this State through the caller-owned controller.
  VoidCallback _controllerListener;

  /// This has to happen only once per app
  void _initSpeech() async {
    _speechEnabled = await _speechToText.initialize(
      onError: (SpeechRecognitionError error) async {
        Fluttertoast.showToast(msg: "failed to convert text to speech");
        setState(() {});
      },
    );
    // FIX: initialize() completes after the first build; refresh so the mic
    // availability is reflected in the UI. setState is mounted-guarded below.
    setState(() {});
  }

  /// Each time to start a speech recognition session
  void _startListening() async {
    _speechEnabled = _speechToText.isAvailable;
    if (_speechToText.isListening) {
      Fluttertoast.showToast(msg: "Currently in use");
      return;
    }
    if (!_speechEnabled) return;
    await _speechToText.listen(
      onResult: (SpeechRecognitionResult result) {
        widget.controller.text = result.recognizedWords;
        setState(() {});
      },
      localeId: _settingProvider.speechToText,
    );
    // Rebuild so the mic icon switches to the recording indicator.
    setState(() {});
  }

  /// Manually stop the active speech recognition session
  /// Note that there are also timeouts that each platform enforces
  /// and the SpeechToText plugin supports setting timeouts on the
  /// listen method.
  void _stopListening() async {
    await _speechToText.stop();
    setState(() {});
  }

  @override
  void initState() {
    super.initState();
    _initSpeech();
    // Rebuild so the delete button appears/disappears as the text changes.
    _controllerListener = () {
      setState(() {});
    };
    widget.controller.addListener(_controllerListener);
  }

  @override
  void dispose() {
    // FIX: remove the listener (the controller outlives this widget) and
    // cancel any in-flight recognition session so plugin callbacks stop.
    widget.controller.removeListener(_controllerListener);
    _speechToText.cancel();
    super.dispose();
  }

  @override
  void setState(VoidCallback fn) {
    // Plugin callbacks may fire after unmount; ignore them.
    if (!mounted) return;
    super.setState(fn);
  }

  @override
  Widget build(BuildContext context) {
    // Explicit type argument: Provider.of<dynamic> is rejected by provider.
    _settingProvider = Provider.of<SettingProvider>(context);
    return Container(
      padding: const EdgeInsets.only(left: 12, right: 12),
      decoration: BoxDecoration(
        color: const Color(0xfff5f5f5),
        border: Border.all(
          color: const Color(0xffefefef),
        ),
        borderRadius: BorderRadius.circular(
            AppStyle.borderRadius * AppStyle.getScaleFactor(context)),
      ),
      child: Row(
        children: [
          widget.mini
              ? const SizedBox.shrink()
              : const ASubTitle("Speech To Text"),
          widget.controller.text.isNotEmpty
              ? AIconButton2(
                  iconData: Icons.delete,
                  onPressed: () {
                    widget.controller.clear();
                    setState(() {});
                  },
                )
              : const SizedBox.shrink(),
          const Spacer(),
          TextButton(
            onPressed: () {
              // Locale cannot be switched while a session is active.
              if (_speechToText.isListening) return;
              if (_settingProvider.speechToText == "ar") {
                _settingProvider.setSpeechToText("en");
              } else {
                _settingProvider.setSpeechToText("ar");
              }
            },
            child: Text(_settingProvider.speechToText),
          ),
          GestureDetector(
            child: _speechToText.isListening
                ? const Icon(
                    Icons.fiber_manual_record,
                    color: Colors.red,
                  )
                : Icon(
                    Icons.mic,
                    color: Theme.of(context).colorScheme.primary,
                  ),
            onTap: () async {
              if (!_speechEnabled) {
                Fluttertoast.showToast(msg: "microphone not available");
                return;
              }
              if (_speechToText.isListening) {
                _stopListening();
              } else {
                _startListening();
              }
            },
          ),
        ],
      ),
    );
  }
}