import 'dart:async';
import 'dart:io';

import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:fluttertoast/fluttertoast.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:record_mp3_plus/record_mp3_plus.dart';
import 'package:rive/rive.dart';
import 'package:test_sa/extensions/context_extension.dart';
import 'package:test_sa/extensions/int_extensions.dart';
import 'package:test_sa/extensions/text_extensions.dart';
import 'package:test_sa/extensions/widget_extensions.dart';
import 'package:test_sa/views/widgets/sound/sound_player.dart';

import '../../../new_views/app_style/app_color.dart';
import '../../../new_views/common_widgets/app_text_form_field.dart';

/// A form field that records an MP3 voice note and reports the recorded
/// file path through [onRecord] (or `null` when the recording is cleared).
class RecordSound extends StatefulWidget {
  final Function(String?) onRecord;
  final Function(String)? onStop;
  final bool enabled;

  const RecordSound({
    Key? key,
    required this.onRecord,
    this.onStop,
    this.enabled = true,
  }) : super(key: key);

  @override
  State<RecordSound> createState() => _RecordSoundState();
}

class _RecordSoundState extends State<RecordSound> {
  // FlutterSoundRecorder _myRecorder = FlutterSoundRecorder();
  bool _recorderIsOpened = false;
  bool _recording = false;
  bool _played = false;
  String? _record;
  // Nullable so a tap before the Rive asset finishes loading cannot throw
  // a LateInitializationError.
  Artboard? _rive;
  Timer? _timer;
  late TextEditingController _timeController;
  FocusNode node = FocusNode();

  late String recordingFileDirectory;

  @override
  void setState(VoidCallback fn) {
    // Guard against setState calls after disposal (e.g. from the periodic
    // timer or the asynchronous asset load).
    if (mounted) super.setState(fn);
  }

  @override
  void initState() {
    super.initState();
    _timeController = TextEditingController();
    node.unfocus();
    _recorderIsOpened = true;
    // RecordMp3.instance.start(recordFilePath, (type) {
    //   // record fail callback
    // });
    // _myRecorder.openRecorder().then((value) {
    //   _recorderIsOpened = true;
    //   setState(() {});
    // });

    // Load the animation file from the bundle; note that you could also
    // download this. The RiveFile just expects a list of bytes.
    rootBundle.load('assets/rives/recording.riv').then(
      (data) async {
        // Load the RiveFile from the binary data.
        final file = RiveFile.import(data);
        // The artboard is the root of the animation and gets drawn in the
        // Rive widget.
        final artboard = file.mainArtboard;
        // Add a controller to play back a known animation on the main/default
        // artboard. We store a reference to it so we can toggle playback.
        artboard.addController(SimpleAnimation('recording'));
        _rive = artboard;
        setState(() {});
      },
    );
  }

  @override
  void dispose() {
    _timer?.cancel();
    _timeController.dispose();
    node.dispose();
    // Be careful: you must stop the recorder when you have finished with it.
    RecordMp3.instance.stop();
    // _myRecorder.closeRecorder();
    // _myRecorder = null;
    super.dispose();
  }

  Future<void> _startRecording() async {
    // Request the microphone permission and retry once before giving up.
    PermissionStatus status = await Permission.microphone.request();
    if (!status.isGranted) {
      status = await Permission.microphone.request();
      if (!status.isGranted) {
        Fluttertoast.showToast(msg: "Permission Denied");
        return;
      }
    }

    // Update the elapsed-time text every second while recording.
    _timer = Timer.periodic(const Duration(seconds: 1), (timer) {
      setState(() {
        // Duration.toString() looks like "0:00:05.000000"; keep "00:05".
        String duration = Duration(seconds: timer.tick).toString();
        duration = duration.substring(duration.indexOf(":") + 1, duration.indexOf("."));
        // Note: recordTime is computed but not currently displayed;
        // the field text comes from `duration` above.
        String recordTime = ((timer.tick) / 60).toStringAsFixed(2).replaceFirst(".", ":");
        // print("recordTime:$recordTime");
        if (recordTime.length == 4 || recordTime.length == 7) {
          recordTime = "0$recordTime";
        }
        _timeController.text = duration;
      });
    });

    _rive?.addController(SimpleAnimation('recording'));
    if (!_recorderIsOpened) {
      // await _myRecorder.openRecorder();
      _recorderIsOpened = true;
    }

    // Record into a unique MP3 file inside the temporary directory.
    final Directory tempDir = await getTemporaryDirectory();
    recordingFileDirectory = "${tempDir.path}/record_${DateTime.now().millisecondsSinceEpoch}.mp3";
    RecordMp3.instance.start(recordingFileDirectory, (type) {
      // record fail callback
    });
    // await _myRecorder.startRecorder(
    //     toFile: "record_${DateTime.now().millisecondsSinceEpoch}.mp3",
    //     codec: Codec.aacADTS, sampleRate: 360000, bitRate: 360000);
    _recording = true;
    setState(() {});
  }

  Future<void> _stopRecording() async {
    if (!_recording) {
      setState(() {});
      return;
    }
    if (_timer?.isActive ?? false) {
      _timer?.cancel();
    }
    RecordMp3.instance.stop();
    // String path = (await _myRecorder.stopRecorder()).toString();
    _record = recordingFileDirectory;
    widget.onRecord(recordingFileDirectory);
    _recording = false;
    setState(() {});
  }

  Future<void> _cancelRecording() async {
    if (!_recording) return;
    RecordMp3.instance.stop();
    // String path = await _myRecorder.stopRecorder();
    // _myRecorder.deleteRecord(fileName: path);
    _rive?.addController(SimpleAnimation('delete'));
    // rebuild();
    _recording = false;
    await Future.delayed(const Duration(seconds: 1));
    if (!_recording) setState(() {});
    // _message.memoryAudio.;
  }

  @override
  Widget build(BuildContext context) {
    if (node.hasFocus && widget.enabled) node.unfocus();
    return Column(
      children: [
        Stack(
          alignment: AlignmentDirectional.centerEnd,
          children: [
            AppTextFormField(
              enable: widget.enabled,
              node: node,
              controller: _timeController,
              labelText: context.translation.recordVoice,
              initialValue: (_timeController.text.isEmpty) ? "00:00" : _timeController.text,
              suffixIcon: (_recording
                      ? "record".toLottieAsset(height: 24)
                      : (_record != null ? "trash" : "mic").toSvgAsset(
                          color: context.isDark ? AppColor.neutral10 : AppColor.neutral20,
                          height: 24,
                        ))
                  .paddingOnly(end: 16),
            ),
            // Invisible tap target layered over the suffix icon:
            // tap to stop an active recording, clear an existing one,
            // or start a new recording.
            SizedBox(height: 50.toScreenHeight, width: 50.toScreenWidth).onPress(() {
              if (_recording) {
                _stopRecording();
              } else if (_record != null) {
                _timeController.text = "00:00";
                widget.onRecord(null);
                _record = null;
                setState(() {});
              } else {
                _startRecording();
              }
            }),
          ],
        ),
        if (_record != null) 8.height,
        if (_record != null) ASoundPlayer(audio: _record!),
      ],
    );
  }
}
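// Usage sketch: a minimal example of wiring RecordSound into a parent form,
// assuming the parent keeps the returned path in its own state. The
// `VoiceNoteField` widget and `_audioPath` field below are hypothetical and
// only illustrate the `onRecord` contract (a path on stop, `null` on clear).
//
// class _VoiceNoteFieldState extends State<VoiceNoteField> {
//   String? _audioPath;
//
//   @override
//   Widget build(BuildContext context) {
//     return RecordSound(
//       enabled: true,
//       onRecord: (path) => setState(() => _audioPath = path),
//     );
//   }
// }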