Text-to-speech, call comments, and voice recording added.
parent e65d2a44ed
commit a9042a834c
@@ -0,0 +1,3 @@
<svg width="17" height="17" viewBox="0 0 17 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2.05 14.95C2.26 16.11 3.28 17 4.5 17H14.5C15.88 17 17 15.88 17 14.5V10.5C17 9.12 15.88 8 14.5 8H14V2.5C14 1.12 12.88 0 11.5 0H2.5C1.12 0 0 1.12 0 2.5V12.5C0 13.72 0.88 14.74 2.05 14.95ZM3.5 3H10.5C10.78 3 11 3.22 11 3.5C11 3.78 10.78 4 10.5 4H3.5C3.22 4 3 3.78 3 3.5C3 3.22 3.22 3 3.5 3ZM3.5 5.5H9.5C9.78 5.5 10 5.72 10 6C10 6.28 9.78 6.5 9.5 6.5H3.5C3.22 6.5 3 6.28 3 6C3 5.72 3.22 5.5 3.5 5.5ZM3 10.5C3 9.67 3.67 9 4.5 9H11.5C11.78 9 12 8.78 12 8.5V7.71L13.15 8.86C13.25 8.96 13.37 9.01 13.5 9.01H14.5C15.33 9.01 16 9.68 16 10.51V14.51C16 15.34 15.33 16.01 14.5 16.01H4.5C3.67 16.01 3 15.34 3 14.51V10.51V10.5Z" fill="#7D859A"/>
</svg>
@@ -0,0 +1,352 @@
import 'dart:async';
import 'dart:io';

import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:fluttertoast/fluttertoast.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:provider/provider.dart';
import 'package:record_mp3/record_mp3.dart';
import 'package:rive/rive.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';
import 'package:test_sa/controllers/providers/settings/setting_provider.dart';
import 'package:test_sa/extensions/context_extension.dart';
import 'package:test_sa/extensions/int_extensions.dart';
import 'package:test_sa/extensions/text_extensions.dart';
import 'package:test_sa/extensions/widget_extensions.dart';
import 'package:test_sa/views/widgets/sound/sound_player.dart';

import '../../../new_views/app_style/app_color.dart';
import '../../../new_views/common_widgets/app_text_form_field.dart';

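/// A call-comment input that combines free text, speech-to-text dictation,
/// and MP3 voice recording. Typed or dictated text is reported through
/// [onMessageChange]; the recorded file path (or null when the recording is
/// deleted) is reported through [onRecord].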
class TextSpeechRecordWidget extends StatefulWidget {
  final Function(String) onRecord;
  final Function(String) onStop;
  final bool enabled;
  final Function(String) onMessageChange;
  final String initialMessage;

  const TextSpeechRecordWidget({Key key, @required this.onRecord, this.onStop, this.enabled = true, this.onMessageChange, this.initialMessage}) : super(key: key);

  @override
  State<TextSpeechRecordWidget> createState() => _RecordSoundState();
}

class _RecordSoundState extends State<TextSpeechRecordWidget> {
  // FlutterSoundRecorder _myRecorder = FlutterSoundRecorder();

  bool _recorderIsOpened = false;
  bool _recording = false;
  bool _played = false;
  String _record;
  Artboard _rive;
  Timer _timer;
  TextEditingController _timeController;
  TextEditingController _commentController;
  bool _speechEnabled = false;

  FocusNode node = FocusNode();

  @override
  void setState(VoidCallback fn) {
    if (mounted) super.setState(fn);
  }

  final SpeechToText _speechToText = SpeechToText();

  /// Speech recognition only needs to be initialized once per app session.
  void _initSpeech() async {
    _speechEnabled = await _speechToText.initialize(
      onError: (SpeechRecognitionError error) async {
        Fluttertoast.showToast(msg: "failed to convert speech to text");
        setState(() {});
      },
    );
  }

  SettingProvider _settingProvider;

  @override
  void initState() {
    super.initState();
    _initSpeech();
    _timeController = TextEditingController();
    _commentController = TextEditingController();
    node.unfocus();
    _recorderIsOpened = true;
    // RecordMp3.instance.start(recordFilePath, (type) {
    //   // record fail callback
    // });
    // _myRecorder.openRecorder().then((value) {
    //   _recorderIsOpened = true;
    //   setState(() {});
    // });

    // Load the animation file from the bundle, note that you could also
    // download this. The RiveFile just expects a list of bytes.
    rootBundle.load('assets/rives/recording.riv').then(
      (data) async {
        // Load the RiveFile from the binary data.
        final file = RiveFile.import(data);
        // The artboard is the root of the animation and gets drawn in the
        // Rive widget.
        final artboard = file.mainArtboard;
        // Add a controller to play back a known animation on the main/default
        // artboard. We store a reference to it so we can toggle playback.
        artboard.addController(SimpleAnimation('recording'));
        _rive = artboard;

        setState(() {});
      },
    );
  }

  @override
  void dispose() {
    _timer?.cancel();
    _timeController?.dispose();
    _commentController?.dispose();
    // Be careful: you must `close` the audio session when you have finished with it.
    RecordMp3.instance.stop();
    //_myRecorder.closeRecorder();
    // _myRecorder = null;
    super.dispose();
  }

  String recordingFileDirectory;

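  /// Requests microphone permission, starts a one-second timer that writes the
  /// elapsed time into [_timeController], and records MP3 audio into a file in
  /// the temporary directory.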
  _startRecording() async {
    PermissionStatus status = await Permission.microphone.request();
    if (!status.isGranted) {
      PermissionStatus status = await Permission.microphone.request();
      if (!status.isGranted) {
        Fluttertoast.showToast(msg: "Permission Denied");
        return;
      }
    }
    _timer = Timer.periodic(const Duration(seconds: 1), (timer) {
      setState(() {
        String duration = Duration(seconds: timer?.tick).toString();
        duration = duration.substring(duration.indexOf(":") + 1, duration.indexOf("."));

        String recordTime = ((timer?.tick ?? 0) / 60)?.toStringAsFixed(2)?.replaceFirst(".", ":");
        // print("recordTime:$recordTime");
        if (recordTime.length == 4 || recordTime.length == 7) {
          recordTime = "0$recordTime";
        }
        _timeController.text = duration;
      });
    });
    _rive.addController(SimpleAnimation('recording'));
    if (!_recorderIsOpened) {
      // await _myRecorder.openRecorder();
      _recorderIsOpened = true;
    }
    final Directory tempDir = await getTemporaryDirectory();
    recordingFileDirectory = "${tempDir.path}/record_${DateTime.now().millisecondsSinceEpoch}.mp3";
    RecordMp3.instance.start(recordingFileDirectory, (type) {
      // record fail callback
    });

    // await _myRecorder.startRecorder(toFile: "record_${DateTime.now().millisecondsSinceEpoch}.mp3", codec: Codec.aacADTS, sampleRate: 360000, bitRate: 360000);

    _recording = true;
    setState(() {});
  }

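  /// Stops the MP3 recorder, cancels the elapsed-time timer, and hands the
  /// recorded file path to the parent through [widget.onRecord].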
  _stopRecording() async {
    if (!_recording) {
      setState(() {});
      return;
    }
    if (_timer?.isActive ?? false) {
      _timer.cancel();
    }
    RecordMp3.instance.stop();

    //String path = (await _myRecorder.stopRecorder()).toString();
    _record = recordingFileDirectory;
    widget.onRecord(recordingFileDirectory);
    _recording = false;
    setState(() {});
  }

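  /// Starts a speech-to-text session in the locale selected from the popup
  /// menu and streams the recognized words into the comment field.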
  void _startListening() async {
    _speechEnabled = _speechToText.isAvailable;
    if (_speechToText.isListening) {
      Fluttertoast.showToast(msg: "Currently in use");
      return;
    }
    if (!_speechEnabled) return;
    await _speechToText.listen(
        onResult: (SpeechRecognitionResult result) {
          _commentController.text = result.recognizedWords;
          widget.onMessageChange(_commentController.text);
          setState(() {});
        },
        localeId: _settingProvider.speechToText);
    setState(() {});
  }

  void _stopListening() async {
    await _speechToText.stop();
    setState(() {});
  }

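  /// Discards the in-progress recording and plays the Rive 'delete' animation.
  /// Not currently wired to any control in [build].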
  _cancelRecording() async {
    if (!_recording) return;
    RecordMp3.instance.stop();
    // String path = await _myRecorder.stopRecorder();
    // _myRecorder.deleteRecord(fileName: path);
    _rive.addController(SimpleAnimation('delete'));

    // rebuild();
    _recording = false;
    await Future.delayed(const Duration(seconds: 1));
    if (!_recording) setState(() {});
    // _message.memoryAudio.;
  }

  @override
  Widget build(BuildContext context) {
    if (node.hasFocus && widget.enabled) node.unfocus();
    _settingProvider ??= Provider.of<SettingProvider>(context);
    return Column(
      mainAxisSize: MainAxisSize.min,
      children: [
        Row(
          crossAxisAlignment: CrossAxisAlignment.start,
          children: [
            if (!_recording) ...[
              AppTextFormField(
                controller: _commentController,
                labelText: _speechToText.isListening ? "Listening..." : context.translation.callComments,
                alignLabelWithHint: true,
                showWithoutDecoration: true,
                backgroundColor: Colors.transparent,
                textInputType: TextInputType.multiline,
                // suffixIcon:
                //     (_recording ? "record".toLottieAsset(height: 24) : (_record != null ? "trash" : "mic").toSvgAsset(color: context.isDark ? AppColor.neutral10 : AppColor.neutral20, height: 24))
                //         .paddingOnly(end: 16),
                initialValue: widget.initialMessage,
                onChange: (text) {
                  widget.onMessageChange(text);
                },
                onSaved: (text) {
                  widget.onMessageChange(text);
                },
              ).expanded,
              (_speechToText.isListening
                      ? SizedBox(
                          height: 24.toScreenHeight,
                          width: 24.toScreenWidth,
                          child: const Icon(
                            Icons.fiber_manual_record,
                            color: Colors.red,
                          )).onPress(() {
                          _stopListening();
                        })
                      : PopupMenuButton<String>(
                          child: SizedBox(
                            height: 24.toScreenHeight,
                            width: 24.toScreenWidth,
                            child: "speech_to_text".toSvgAsset(color: context.isDark ? AppColor.neutral10 : AppColor.neutral20, height: 24, width: 24),
                          ),
                          onSelected: (String selectedLanguage) {
                            _settingProvider.setSpeechToText(selectedLanguage);
                            _startListening();
                          },
                          itemBuilder: (BuildContext context) {
                            return [
                              const PopupMenuItem<String>(
                                value: 'ar',
                                child: Text('Arabic'),
                              ),
                              const PopupMenuItem<String>(
                                value: 'en',
                                child: Text('English'),
                              ),
                            ];
                          },
                        ))
                  .paddingOnly(end: 16, top: 16),
            ],
            if (_recording)
              AppTextFormField(
                enable: widget.enabled,
                node: node,
                backgroundColor: Colors.transparent,
                showWithoutDecoration: true,
                controller: _timeController,
                labelText: context.translation.recordVoice,
                textInputType: TextInputType.multiline,
                alignLabelWithHint: true,
                initialValue: (_timeController?.text?.isEmpty ?? true) ? "00:00" : _timeController?.text,
                // suffixIcon:
                //     (_recording ? "record".toLottieAsset(height: 24) : (_record != null ? "trash" : "mic").toSvgAsset(color: context.isDark ? AppColor.neutral10 : AppColor.neutral20, height: 24))
                //         .paddingOnly(end: 16),
              ).expanded,

            // SizedBox(
            //   height: 24.toScreenHeight,
            //   width: 24.toScreenWidth,
            //   child: "speech_to_text".toSvgAsset(color: context.isDark ? AppColor.neutral10 : AppColor.neutral20, height: 24, width: 24),
            // ).paddingOnly(end: 16).onPress(_speechEnabled
            //     ? () async {
            //         if (!_speechEnabled) {
            //           Fluttertoast.showToast(msg: "microphone not available");
            //           return;
            //         }
            //         if (_speechToText.isListening) {
            //           _stopListening();
            //         } else {
            //           PopupMenuButton<String>(
            //             onSelected: (String selectedLanguage) {
            //               _settingProvider.setSpeechToText(selectedLanguage);
            //               _startListening();
            //             },
            //             itemBuilder: (BuildContext context) => <PopupMenuEntry<String>>[
            //               const PopupMenuItem<String>(
            //                 value: "ar",
            //                 child: Text('Arabic'),
            //               ),
            //               const PopupMenuItem<String>(
            //                 value: "en",
            //                 child: Text('English'),
            //               ),
            //             ],
            //           );
            //         }
            //       }
            //     : null),
            SizedBox(
              height: _recording ? 40 : 24.toScreenHeight,
              width: _recording ? 40 : 24.toScreenWidth,
              child: (_recording
                  ? "record".toLottieAsset(height: 24)
                  : (_record != null ? "trash" : "mic").toSvgAsset(color: context.isDark ? AppColor.neutral10 : AppColor.neutral20, height: 24, width: 24)),
            ).paddingOnly(end: 16, top: 16).onPress(() {
              if (_recording) {
                _stopRecording();
              } else if (_record != null) {
                // Tapping the trash icon clears the current recording.
                _timeController?.text = "00:00";
                widget.onRecord(null);
                _record = null;
                setState(() {});
              } else {
                _startRecording();
              }
            }),
          ],
        ),
        if (_record != null) ...[
          //8.height,
          const Divider().defaultStyle(context),
          ASoundPlayer(audio: _record).paddingOnly(top: 8, bottom: 16, start: 16, end: 16),
        ]
      ],
    ).toShadowContainer(context, padding: 0);
  }
}
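A minimal usage sketch of the widget added above, for reviewers. The host CallCommentSection widget and its onCommentChanged callback are hypothetical and only illustrate how the commit's onRecord and onMessageChange callbacks are meant to be wired; it assumes the same imports as the file above.

// Hypothetical host widget: only TextSpeechRecordWidget and its parameters
// come from this commit; everything else here is illustrative.
class CallCommentSection extends StatelessWidget {
  final Function(String) onCommentChanged;

  const CallCommentSection({Key key, this.onCommentChanged}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return TextSpeechRecordWidget(
      enabled: true,
      initialMessage: "",
      // Called with the temp MP3 path when recording stops, and with null
      // when the user taps the trash icon to delete the recording.
      onRecord: (String recordPath) {
        // e.g. attach recordPath to the record being edited
      },
      // Called on every keystroke and on each speech-to-text result.
      onMessageChange: (String comment) {
        onCommentChanged?.call(comment);
      },
    );
  }
}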