import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_sound/flutter_sound.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:rive/rive.dart';
import 'package:test_sa/views/widgets/buttons/app_icon_button2.dart';
import 'package:test_sa/views/widgets/buttons/app_small_button.dart';
import 'package:test_sa/views/widgets/sound/sound_player.dart';

import '../../app_style/sizing.dart';

/// A push-to-talk voice recorder row.
///
/// Hold the mic icon to record (requests microphone permission on demand),
/// release to finish, or tap the animated waveform area to cancel. A Rive
/// animation plays while recording; a finished recording is shown with an
/// [ASoundPlayer] and a delete button.
class RecordSound extends StatefulWidget {
  /// Called with the recorded file path when a recording completes,
  /// and with `null` when the user deletes an existing recording.
  final Function(String) onRecord;

  const RecordSound({Key key, @required this.onRecord}) : super(key: key);

  @override
  State<RecordSound> createState() => _RecordSoundState();
}

class _RecordSoundState extends State<RecordSound> {
  FlutterSoundRecorder _myRecorder = FlutterSoundRecorder();

  // Whether openRecorder() has completed; startRecorder must not be called before.
  bool _recorderIsOpened = false;

  // Whether a recording session is currently in progress.
  bool _recording = false;

  // NOTE(review): set on quick taps but never read in build() — presumably
  // intended to show a "hold to record" hint. TODO: render it or remove it.
  bool _fastTab = false;

  // Path of the last completed recording; null when there is none.
  String _record;

  // Rive artboard for the recording animation; null until the asset loads.
  Artboard _rive;

  @override
  void setState(VoidCallback fn) {
    // Async callbacks (permission request, recorder open, asset load) may
    // complete after the widget is gone — only rebuild while mounted.
    if (mounted) super.setState(fn);
  }

  @override
  void initState() {
    super.initState();
    _myRecorder.openRecorder().then((value) {
      _recorderIsOpened = true;
      setState(() {});
    });
    // Load the animation file from the bundle, note that you could also
    // download this. The RiveFile just expects a list of bytes.
    rootBundle.load('assets/rives/recording.riv').then(
      (data) async {
        // Load the RiveFile from the binary data.
        final file = RiveFile.import(data);
        // The artboard is the root of the animation and gets drawn in the
        // Rive widget.
        final artboard = file.mainArtboard;
        // Add a controller to play back a known animation on the main/default
        // artboard. We store a reference to it so we can toggle playback.
        artboard.addController(SimpleAnimation('recording'));
        _rive = artboard;
        setState(() {});
      },
    );
  }

  @override
  void dispose() {
    // Be careful: you must `close` the audio session when finished with it.
    _myRecorder.closeRecorder();
    _myRecorder = null;
    super.dispose();
  }

  /// Requests microphone permission and starts a new AAC/MP4 recording.
  ///
  /// Silently aborts if permission is denied (no UI feedback by design).
  _startRecording() async {
    _fastTab = false;
    PermissionStatus status = await Permission.microphone.request();
    if (!status.isGranted) {
      return;
    }
    // FIX: guard against the Rive asset not having loaded yet — the original
    // dereferenced _rive unconditionally and crashed on an early tap.
    _rive?.addController(SimpleAnimation('recording'));
    if (!_recorderIsOpened) {
      await _myRecorder.openRecorder();
      _recorderIsOpened = true;
    }
    await _myRecorder.startRecorder(
      toFile: "record_${DateTime.now().millisecondsSinceEpoch}.mp4",
      codec: Codec.aacMP4,
      // FIX: was 360000 — not a valid hardware sample rate (looks like a copy
      // of bitRate). 44.1 kHz is universally supported for AAC.
      sampleRate: 44100,
      bitRate: 360000,
    );
    _recording = true;
    setState(() {});
  }

  /// Stops the active recording and reports the file path via [widget.onRecord].
  ///
  /// If called while no recording is active (a tap too quick to have started
  /// one), only flags [_fastTab] and returns.
  _stopRecording() async {
    if (!_recording) {
      _fastTab = true;
      setState(() {});
      return;
    }
    String path = (await _myRecorder.stopRecorder()).toString();
    _record = path;
    widget.onRecord(path);
    _recording = false;
    setState(() {});
  }

  /// Aborts the active recording, deletes its file, and plays the
  /// 'delete' animation before rebuilding.
  _cancelRecording() async {
    if (!_recording) return;
    String path = await _myRecorder.stopRecorder();
    _myRecorder.deleteRecord(fileName: path);
    _rive?.addController(SimpleAnimation('delete'));
    _recording = false;
    // Let the 'delete' animation play out before the waveform disappears.
    await Future.delayed(const Duration(seconds: 1));
    if (!_recording) setState(() {});
  }

  @override
  Widget build(BuildContext context) {
    return Column(
      children: [
        Row(
          children: [
            Expanded(
              child: _recording
                  ? Row(
                      children: [
                        ASmallButton(
                          text: "done",
                          onPressed: () {
                            _stopRecording();
                          },
                        ),
                        Expanded(
                          child: Stack(
                            children: [
                              SizedBox(
                                height: 24 * AppStyle.getScaleFactor(context),
                                // FIX: the artboard may not be loaded yet;
                                // show empty space instead of crashing.
                                child: _rive == null
                                    ? const SizedBox()
                                    : Rive(artboard: _rive),
                              ),
                              // Transparent full-width hit target: tapping the
                              // waveform cancels the recording.
                              InkWell(
                                child: SizedBox(
                                  height: 32 * AppStyle.getScaleFactor(context),
                                  width: MediaQuery.of(context).size.width,
                                ),
                                onTap: () {
                                  _cancelRecording();
                                },
                              ),
                            ],
                          ),
                        ),
                      ],
                    )
                  : _record != null
                      ? Row(
                          children: [
                            Expanded(child: ASoundPlayer(audio: _record)),
                            AIconButton2(
                              iconData: Icons.delete,
                              onPressed: () {
                                // Deleting notifies the parent with null.
                                widget.onRecord(null);
                                _record = null;
                                setState(() {});
                              },
                            )
                          ],
                        )
                      : const Text("Record Voice"),
            ),
            Material(
              color: Colors.transparent,
              child: GestureDetector(
                // Press-and-hold mic: down starts, up stops, cancel aborts.
                child: const Padding(
                  padding: EdgeInsets.all(16.0),
                  child: Icon(Icons.mic),
                ),
                onTapDown: (TapDownDetails details) async {
                  _startRecording();
                },
                onTapUp: (TapUpDetails details) async {
                  _stopRecording();
                },
                onTapCancel: () async {
                  _cancelRecording();
                },
              ),
            ),
          ],
        ),
      ],
    );
  }
}