// cloudsolutions-atoms/lib/views/widgets/sound/TextSpeechRecordWidget.dart
import 'dart:async';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:fluttertoast/fluttertoast.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:provider/provider.dart';
import 'package:record_mp3/record_mp3.dart';
import 'package:rive/rive.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';
import 'package:test_sa/controllers/providers/settings/setting_provider.dart';
import 'package:test_sa/extensions/context_extension.dart';
import 'package:test_sa/extensions/int_extensions.dart';
import 'package:test_sa/extensions/text_extensions.dart';
import 'package:test_sa/extensions/widget_extensions.dart';
import 'package:test_sa/views/widgets/sound/sound_player.dart';
import '../../../new_views/app_style/app_color.dart';
import '../../../new_views/common_widgets/app_text_form_field.dart';
/// A combined free-text / voice-note input widget.
///
/// Shows a multiline comment field with speech-to-text dictation, plus a
/// microphone button that records an MP3 voice note and plays it back.
class TextSpeechRecordWidget extends StatefulWidget {
  /// Called with the recording's file path when a recording finishes, and
  /// with `null` when the user deletes the recording.
  final Function(String) onRecord;

  /// NOTE(review): declared but never invoked by the state in this file —
  /// confirm whether callers still rely on it.
  final Function(String) onStop;

  /// Fired whenever the typed or dictated comment text changes. Optional.
  final Function(String) onMessageChange;

  /// Text pre-filled into the comment field on first build. Optional.
  final String initialMessage;

  /// Whether the time field accepts focus/input while recording.
  final bool enabled;

  const TextSpeechRecordWidget({
    Key key,
    @required this.onRecord,
    this.onStop,
    this.enabled = true,
    this.onMessageChange,
    this.initialMessage,
  }) : super(key: key);

  @override
  State<TextSpeechRecordWidget> createState() => _RecordSoundState();
}
class _RecordSoundState extends State<TextSpeechRecordWidget> {
  bool _recorderIsOpened = false;
  bool _recording = false;
  // Path of the finished recording; null when no recording exists.
  String _record;
  // Rive artboard for the recording animation; null until the asset loads.
  Artboard _rive;
  // Ticks once per second while recording to drive the elapsed-time display.
  Timer _timer;
  TextEditingController _timeController;
  TextEditingController _commentController;
  bool _speechEnabled = false;
  final FocusNode node = FocusNode();
  final SpeechToText _speechToText = SpeechToText();
  SettingProvider _settingProvider;
  // Destination path of the recording currently in progress.
  String recordingFileDirectory;

  @override
  void setState(VoidCallback fn) {
    // Guard: timers and async callbacks may fire after this state is
    // disposed; only rebuild while still mounted.
    if (mounted) super.setState(fn);
  }

  /// Initializes the speech-to-text engine. This has to happen only once
  /// per app.
  void _initSpeech() async {
    _speechEnabled = await _speechToText.initialize(
      onError: (SpeechRecognitionError error) async {
        Fluttertoast.showToast(msg: "failed to convert text to speech");
        setState(() {});
      },
    );
  }

  @override
  void initState() {
    super.initState();
    _initSpeech();
    _timeController = TextEditingController();
    _commentController = TextEditingController();
    node.unfocus();
    _recorderIsOpened = true;
    // Load the recording animation from the bundle; RiveFile just expects a
    // list of bytes, so this could also be downloaded.
    rootBundle.load('assets/rives/recording.riv').then(
      (data) async {
        final file = RiveFile.import(data);
        // The artboard is the root of the animation and gets drawn in the
        // Rive widget; keep a reference so we can toggle playback.
        final artboard = file.mainArtboard;
        artboard.addController(SimpleAnimation('recording'));
        _rive = artboard;
        setState(() {});
      },
    );
  }

  @override
  void dispose() {
    // FIX: the original leaked _commentController, the FocusNode, a
    // possibly-running timer, and never stopped the speech engine.
    _timer?.cancel();
    _timeController?.dispose();
    _commentController?.dispose();
    node.dispose();
    _speechToText.stop();
    // You must stop the recorder when you have finished with it.
    RecordMp3.instance.stop();
    super.dispose();
  }

  /// Requests microphone permission, starts the elapsed-time ticker and
  /// begins recording an MP3 into the temp directory.
  _startRecording() async {
    PermissionStatus status = await Permission.microphone.request();
    if (!status.isGranted) {
      // One retry; on most platforms a second request after a hard denial
      // resolves immediately without prompting again.
      status = await Permission.microphone.request();
      if (!status.isGranted) {
        Fluttertoast.showToast(msg: "Permission Denied");
        return;
      }
    }
    _timer = Timer.periodic(const Duration(seconds: 1), (timer) {
      setState(() {
        // Duration.toString() is "H:MM:SS.ffffff"; keep the "MM:SS" middle.
        String duration = Duration(seconds: timer.tick).toString();
        _timeController.text = duration.substring(duration.indexOf(":") + 1, duration.indexOf("."));
      });
    });
    // FIX: _rive is null until the asset finishes loading — guard the call
    // instead of crashing when the mic is tapped early.
    _rive?.addController(SimpleAnimation('recording'));
    if (!_recorderIsOpened) {
      _recorderIsOpened = true;
    }
    final Directory tempDir = await getTemporaryDirectory();
    recordingFileDirectory = "${tempDir.path}/record_${DateTime.now().millisecondsSinceEpoch}.mp3";
    RecordMp3.instance.start(recordingFileDirectory, (type) {
      // record fail callback
    });
    _recording = true;
    setState(() {});
  }

  /// Stops the recorder, freezes the timer and reports the file path to the
  /// caller via [widget.onRecord].
  _stopRecording() async {
    if (!_recording) {
      setState(() {});
      return;
    }
    if (_timer?.isActive ?? false) {
      _timer.cancel();
    }
    RecordMp3.instance.stop();
    _record = recordingFileDirectory;
    widget.onRecord(recordingFileDirectory);
    _recording = false;
    setState(() {});
  }

  /// Starts speech-to-text dictation into the comment field using the
  /// locale stored in [SettingProvider].
  void _startListening() async {
    _speechEnabled = _speechToText.isAvailable;
    if (_speechToText.isListening) {
      Fluttertoast.showToast(msg: "Currently in use");
      return;
    }
    if (!_speechEnabled) return;
    await _speechToText.listen(
      onResult: (SpeechRecognitionResult result) {
        _commentController.text = result.recognizedWords;
        // FIX: onMessageChange is optional — don't crash when omitted.
        widget.onMessageChange?.call(_commentController.text);
        setState(() {});
      },
      localeId: _settingProvider.speechToText,
    );
    setState(() {});
  }

  /// Stops an active dictation session.
  void _stopListening() async {
    await _speechToText.stop();
    setState(() {});
  }

  /// Aborts an in-progress recording, playing the delete animation before
  /// rebuilding.
  _cancelRecording() async {
    if (!_recording) return;
    RecordMp3.instance.stop();
    // Guard: the Rive asset may not have loaded yet.
    _rive?.addController(SimpleAnimation('delete'));
    _recording = false;
    // Let the delete animation play for a beat before the rebuild.
    await Future.delayed(const Duration(seconds: 1));
    if (!_recording) setState(() {});
  }

  @override
  Widget build(BuildContext context) {
    // NOTE(review): this drops focus whenever the widget IS enabled; if the
    // intent was "drop focus when disabled" the condition should be
    // `!widget.enabled` — confirm before changing. Behavior kept as-is.
    if (node.hasFocus && widget.enabled) node.unfocus();
    _settingProvider ??= Provider.of<SettingProvider>(context);
    return Column(
      mainAxisSize: MainAxisSize.min,
      children: [
        Row(
          crossAxisAlignment: CrossAxisAlignment.start,
          children: [
            if (!_recording) ...[
              // Free-text comment field (replaced by the timer while recording).
              AppTextFormField(
                controller: _commentController,
                labelText: _speechToText.isListening ? "Listening..." : context.translation.callComments,
                alignLabelWithHint: true,
                showWithoutDecoration: true,
                backgroundColor: Colors.transparent,
                textInputType: TextInputType.multiline,
                initialValue: widget.initialMessage,
                onChange: (text) {
                  widget.onMessageChange?.call(text);
                },
                onSaved: (text) {
                  widget.onMessageChange?.call(text);
                },
              ).expanded,
              // Dictation control: red dot stops listening; otherwise a
              // language popup starts a new session.
              (_speechToText.isListening
                      ? SizedBox(
                          height: 24.toScreenHeight,
                          width: 24.toScreenWidth,
                          child: const Icon(
                            Icons.fiber_manual_record,
                            color: Colors.red,
                          )).onPress(() {
                          _stopListening();
                        })
                      : PopupMenuButton<String>(
                          child: SizedBox(
                            height: 24.toScreenHeight,
                            width: 24.toScreenWidth,
                            child: "speech_to_text".toSvgAsset(color: context.isDark ? AppColor.neutral10 : AppColor.neutral20, height: 24, width: 24),
                          ),
                          onSelected: (String selectedLanguage) {
                            _settingProvider.setSpeechToText(selectedLanguage);
                            _startListening();
                          },
                          itemBuilder: (BuildContext context) {
                            return [
                              const PopupMenuItem<String>(
                                value: 'ar',
                                child: Text('Arabic'),
                              ),
                              const PopupMenuItem<String>(
                                value: 'en',
                                child: Text('English'),
                              ),
                            ];
                          },
                        ))
                  .paddingOnly(end: 16, top: 16),
            ],
            if (_recording)
              // Elapsed-time display while a recording is in progress.
              AppTextFormField(
                enable: widget.enabled,
                node: node,
                backgroundColor: Colors.transparent,
                showWithoutDecoration: true,
                controller: _timeController,
                labelText: context.translation.recordVoice,
                textInputType: TextInputType.multiline,
                alignLabelWithHint: true,
                initialValue: (_timeController?.text?.isEmpty ?? true) ? "00:00" : _timeController?.text,
              ).expanded,
            // Record / stop / trash button: lottie while recording, trash
            // when a recording exists, mic otherwise.
            SizedBox(
              height: _recording ? 40 : 24.toScreenHeight,
              width: _recording ? 40 : 24.toScreenWidth,
              child: (_recording
                  ? "record".toLottieAsset(height: 24)
                  : (_record != null ? "trash" : "mic").toSvgAsset(color: context.isDark ? AppColor.neutral10 : AppColor.neutral20, height: 24, width: 24)),
            ).paddingOnly(end: 16, top: 16).onPress(() {
              if (_recording) {
                _stopRecording();
              } else if (_record != null) {
                // Trash tap: discard the finished recording.
                _timeController?.text = "00:00";
                widget.onRecord(null);
                _record = null;
                setState(() {});
              } else {
                _startRecording();
              }
            }),
          ],
        ),
        if (_record != null) ...[
          const Divider().defaultStyle(context),
          ASoundPlayer(audio: _record).paddingOnly(top: 8, bottom: 16, start: 16, end: 16),
        ]
      ],
    ).toShadowContainer(context, padding: 0);
  }
}