Voice Chat Fixes & Audio Player Implementation

merge-requests/116/head
Aamir Muhammad 3 years ago
parent dd6b0c0902
commit fb3b3e8e46

@ -1,8 +1,8 @@
import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:flutter/foundation.dart';
import 'package:just_audio/just_audio.dart';
List<SingleUserChatModel> singleUserChatModelFromJson(String str) => List<SingleUserChatModel>.from(json.decode(str).map((x) => SingleUserChatModel.fromJson(x)));
@ -61,8 +61,8 @@ class SingleUserChatModel {
bool? isReplied;
bool? isImageLoaded;
Uint8List? image;
Uint8List? voice;
PlayerController? voiceController;
File? voice;
AudioPlayer? voiceController;
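// NOTE (editor sketch, not part of this commit): `voiceController` is now a
// just_audio AudioPlayer tied to this message, and `voice` is the local file it
// plays. The basic just_audio lifecycle looks roughly like this:
//   await voiceController!.setFilePath(voice!.path); // loads the file and returns its duration
//   await voiceController!.play();                   // completes when playback finishes or is paused
//   await voiceController!.pause();                  // can be resumed later with play()
//   await voiceController!.dispose();                // release platform resources when done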
factory SingleUserChatModel.fromJson(Map<String, dynamic> json) => SingleUserChatModel(
userChatHistoryId: json["userChatHistoryId"] == null ? null : json["userChatHistoryId"],
@ -90,7 +90,7 @@ class SingleUserChatModel {
isImageLoaded: false,
image: null,
voice: null,
voiceController: json["fileTypeId"] == 13 ? PlayerController() : null);
voiceController: json["fileTypeId"] == 13 ? AudioPlayer() : null);
Map<String, dynamic> toJson() => {
"userChatHistoryId": userChatHistoryId == null ? null : userChatHistoryId,

@ -10,6 +10,7 @@ import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:http/http.dart';
import 'package:just_audio/just_audio.dart' as JustAudio;
import 'package:just_audio/just_audio.dart';
import 'package:mohem_flutter_app/api/chat/chat_api_client.dart';
import 'package:mohem_flutter_app/app_state/app_state.dart';
import 'package:mohem_flutter_app/classes/consts.dart';
@ -35,22 +36,21 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
TextEditingController message = TextEditingController();
TextEditingController search = TextEditingController();
List<SingleUserChatModel> userChatHistory = [];
List<SingleUserChatModel> userChatHistory = [], repliedMsg = [];
List<ChatUser>? pChatHistory, searchedChats;
String chatCID = '';
bool isLoading = true;
bool isChatScreenActive = false;
int receiverID = 0;
late File selectedFile;
bool isFileSelected = false;
String sFileType = "";
bool isMsgReply = false;
List<SingleUserChatModel> repliedMsg = [];
List<ChatUser> favUsersList = [];
int paginationVal = 0;
bool currentUserTyping = false;
int? cTypingUserId = 0;
bool isTextMsg = false, isReplyMsg = false, isAttachmentMsg = false, isVoiceMsg = false;
//Chat Home Page Counter
int chatUConvCounter = 0;
@ -411,6 +411,9 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
}
void OnSubmitChatAsync(List<Object?>? parameters) {
print(isChatScreenActive);
print(receiverID);
print(isChatScreenActive);
logger.i(parameters);
List<SingleUserChatModel> data = [], temp = [];
for (dynamic msg in parameters!) {
@ -537,45 +540,56 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
required bool isAttachment,
required bool isReply,
Uint8List? image,
Uint8List? voice,
required bool isImageLoaded}) async {
required bool isImageLoaded,
String? userEmail,
int? userStatus,
File? voiceFile,
required bool isVoiceAttached}) async {
Uuid uuid = const Uuid();
String contentNo = uuid.v4();
String msg = message.text;
String msg;
if (isVoiceAttached) {
msg = voiceFile!.path.split("/").last;
} else {
msg = message.text;
}
logger.w(jsonEncode(repliedMsg));
SingleUserChatModel data = SingleUserChatModel(
userChatHistoryId: 0,
chatEventId: chatEventId,
chatSource: 1,
contant: msg,
contantNo: contentNo,
conversationId: chatCID,
createdDate: DateTime.now(),
currentUserId: AppState().chatDetails!.response!.id,
currentUserName: AppState().chatDetails!.response!.userName,
targetUserId: targetUserId,
targetUserName: targetUserName,
isReplied: false,
fileTypeId: fileTypeId,
userChatReplyResponse: isReply ? UserChatReplyResponse.fromJson(repliedMsg.first.toJson()) : null,
fileTypeResponse: isAttachment
? FileTypeResponse(
fileTypeId: fileTypeId,
fileTypeName: getFileExtension(selectedFile.path).toString(),
fileKind: "file",
fileName: selectedFile.path.split("/").last,
fileTypeDescription: getFileTypeDescription(getFileExtension(selectedFile.path).toString()),
)
: null,
image: image,
isImageLoaded: isImageLoaded,
voice: voice,
);
userChatHistoryId: 0,
chatEventId: chatEventId,
chatSource: 1,
contant: msg,
contantNo: contentNo,
conversationId: chatCID,
createdDate: DateTime.now(),
currentUserId: AppState().chatDetails!.response!.id,
currentUserName: AppState().chatDetails!.response!.userName,
targetUserId: targetUserId,
targetUserName: targetUserName,
isReplied: false,
fileTypeId: fileTypeId,
userChatReplyResponse: isReply ? UserChatReplyResponse.fromJson(repliedMsg.first.toJson()) : null,
fileTypeResponse: isAttachment
? FileTypeResponse(
fileTypeId: fileTypeId,
fileTypeName: isVoiceMsg ? getFileExtension(voiceFile!.path).toString() : getFileExtension(selectedFile.path).toString(),
fileKind: "file",
fileName: isVoiceMsg ? msg : selectedFile.path.split("/").last,
fileTypeDescription: isVoiceMsg ? getFileTypeDescription(getFileExtension(voiceFile!.path).toString()) : getFileTypeDescription(getFileExtension(selectedFile.path).toString()),
)
: null,
image: image,
isImageLoaded: isImageLoaded,
voice: isVoiceMsg ? voiceFile! : null,
voiceController: isVoiceMsg ? AudioPlayer() : null);
if (kDebugMode) {
logger.i("model data: " + jsonEncode(data));
}
userChatHistory.insert(0, data);
isFileSelected = false;
isMsgReply = false;
isTextMsg = false;
isReplyMsg = false;
isAttachmentMsg = false;
isVoiceMsg = false;
sFileType = "";
message.clear();
notifyListeners();
@ -586,20 +600,55 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
}
void sendChatMessage(BuildContext context, {required int targetUserId, required int userStatus, required String userEmail, required String targetUserName}) async {
if (!isFileSelected && !isMsgReply) {
if (kDebugMode) {
print("Normal Text Msg");
if (kDebugMode) {
print("====================== Values ============================");
print("Is Text " + isTextMsg.toString());
print("isReply " + isReplyMsg.toString());
print("isAttachment " + isAttachmentMsg.toString());
print("isVoice " + isVoiceMsg.toString());
}
//Text
if (isTextMsg && !isAttachmentMsg && !isVoiceMsg && !isReplyMsg) {
logger.d("// Normal Text Message");
if (message.text == null || message.text.isEmpty) {
return;
}
sendChatToServer(
chatEventId: 1,
fileTypeId: null,
targetUserId: targetUserId,
targetUserName: targetUserName,
isAttachment: false,
chatReplyId: null,
isReply: false,
isImageLoaded: false,
image: null,
isVoiceAttached: false,
userEmail: userEmail,
userStatus: userStatus);
} else if (isTextMsg && !isAttachmentMsg && !isVoiceMsg && isReplyMsg) {
logger.d("// Text Message as Reply");
if (message.text == null || message.text.isEmpty) {
return;
}
sendChatToServer(
chatEventId: 1, fileTypeId: null, targetUserId: targetUserId, targetUserName: targetUserName, isAttachment: false, chatReplyId: null, isReply: false, isImageLoaded: false, image: null);
chatEventId: 1,
fileTypeId: null,
targetUserId: targetUserId,
targetUserName: targetUserName,
chatReplyId: repliedMsg.first.userChatHistoryId,
isAttachment: false,
isReply: true,
isImageLoaded: repliedMsg.first.isImageLoaded!,
image: repliedMsg.first.image,
isVoiceAttached: false,
voiceFile: null,
userEmail: userEmail,
userStatus: userStatus);
}
if (isFileSelected && !isMsgReply) {
if (kDebugMode) {
logger.i("Normal Attachment Msg");
}
// Attachment
else if (!isTextMsg && isAttachmentMsg && !isVoiceMsg && !isReplyMsg) {
logger.d("// Normal Image Message");
Utils.showLoading(context);
dynamic value = await uploadAttachments(AppState().chatDetails!.response!.id.toString(), selectedFile);
String? ext = getFileExtension(selectedFile.path);
@ -613,46 +662,100 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
chatReplyId: null,
isReply: false,
isImageLoaded: true,
image: selectedFile.readAsBytesSync());
image: selectedFile.readAsBytesSync(),
isVoiceAttached: false,
userEmail: userEmail,
userStatus: userStatus);
} else if (!isTextMsg && isAttachmentMsg && !isVoiceMsg && isReplyMsg) {
logger.d("// Image as Reply Msg");
Utils.showLoading(context);
dynamic value = await uploadAttachments(AppState().chatDetails!.response!.id.toString(), selectedFile);
String? ext = getFileExtension(selectedFile.path);
Utils.hideLoading(context);
sendChatToServer(
chatEventId: 2,
fileTypeId: getFileType(ext.toString()),
targetUserId: targetUserId,
targetUserName: targetUserName,
isAttachment: true,
chatReplyId: repliedMsg.first.userChatHistoryId,
isReply: true,
isImageLoaded: true,
image: selectedFile.readAsBytesSync(),
isVoiceAttached: false,
userEmail: userEmail,
userStatus: userStatus);
}
if (!isFileSelected && isMsgReply) {
if (kDebugMode) {
print("Normal Text To Text Reply");
//Voice
else if (!isTextMsg && !isAttachmentMsg && isVoiceMsg && !isReplyMsg) {
logger.d("// Normal Voice Message");
if (!isPause) {
path = await recorderController.stop(false);
}
if (message.text == null || message.text.isEmpty) {
return;
if (kDebugMode) {
logger.i("path:" + path!);
}
File voiceFile = File(path!);
voiceFile.readAsBytesSync();
_timer?.cancel();
isPause = false;
isPlaying = false;
isRecoding = false;
Utils.showLoading(context);
dynamic value = await uploadAttachments(AppState().chatDetails!.response!.id.toString(), voiceFile);
String? ext = getFileExtension(voiceFile.path);
Utils.hideLoading(context);
sendChatToServer(
chatEventId: 1,
fileTypeId: null,
chatEventId: 2,
fileTypeId: getFileType(ext.toString()),
targetUserId: targetUserId,
targetUserName: targetUserName,
chatReplyId: repliedMsg.first.userChatHistoryId,
isAttachment: false,
isReply: true,
isImageLoaded: repliedMsg.first.isImageLoaded!,
image: repliedMsg.first.image);
} // reply msg over image && normal
if (isFileSelected && isMsgReply) {
chatReplyId: null,
isAttachment: true,
isReply: isReplyMsg,
isImageLoaded: false,
voiceFile: voiceFile,
isVoiceAttached: true,
userEmail: userEmail,
userStatus: userStatus);
notifyListeners();
} else if (!isTextMsg && !isAttachmentMsg && isVoiceMsg && isReplyMsg) {
logger.d("// Voice as Reply Msg");
if (!isPause) {
path = await recorderController.stop(false);
}
if (kDebugMode) {
print("Reply With File");
logger.i("path:" + path!);
}
File voiceFile = File(path!);
voiceFile.readAsBytesSync();
_timer?.cancel();
isPause = false;
isPlaying = false;
isRecoding = false;
Utils.showLoading(context);
dynamic value = await uploadAttachments(AppState().chatDetails!.response!.id.toString(), selectedFile);
String? ext = getFileExtension(selectedFile.path);
dynamic value = await uploadAttachments(AppState().chatDetails!.response!.id.toString(), voiceFile);
String? ext = getFileExtension(voiceFile.path);
Utils.hideLoading(context);
sendChatToServer(
chatEventId: 2,
fileTypeId: getFileType(ext.toString()),
targetUserId: targetUserId,
targetUserName: targetUserName,
chatReplyId: null,
isAttachment: true,
chatReplyId: repliedMsg.first.userChatHistoryId,
isReply: true,
isImageLoaded: true,
image: selectedFile.readAsBytesSync());
isReply: isReplyMsg,
isImageLoaded: false,
voiceFile: voiceFile,
isVoiceAttached: true,
userEmail: userEmail,
userStatus: userStatus);
notifyListeners();
}
if (searchedChats != null) {
dynamic contain = searchedChats!.where((ChatUser element) => element.id == targetUserId);
if (contain.isEmpty) {
@ -676,34 +779,36 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
);
notifyListeners();
}
} else {
List<String> emails = [];
emails.add(await EmailImageEncryption().encrypt(val: userEmail));
List<ChatUserImageModel> chatImages = await ChatApiClient().getUsersImages(encryptedEmails: emails);
searchedChats!.add(
ChatUser(
id: targetUserId,
userName: targetUserName,
unreadMessageCount: 0,
email: userEmail,
isImageLoading: false,
image: chatImages.first.profilePicture ?? "",
isImageLoaded: true,
isTyping: false,
isFav: false,
userStatus: userStatus,
userLocalDownlaodedImage: await downloadImageLocal(chatImages.first.profilePicture, targetUserId.toString()),
),
);
notifyListeners();
}
// else {
// List<String> emails = [];
// emails.add(await EmailImageEncryption().encrypt(val: userEmail));
// List<ChatUserImageModel> chatImages = await ChatApiClient().getUsersImages(encryptedEmails: emails);
// searchedChats!.add(
// ChatUser(
// id: targetUserId,
// userName: targetUserName,
// unreadMessageCount: 0,
// email: userEmail,
// isImageLoading: false,
// image: chatImages.first.profilePicture ?? "",
// isImageLoaded: true,
// isTyping: false,
// isFav: false,
// userStatus: userStatus,
// userLocalDownlaodedImage: await downloadImageLocal(chatImages.first.profilePicture, targetUserId.toString()),
// ),
// );
// notifyListeners();
// }
}
void selectImageToUpload(BuildContext context) {
ImageOptions.showImageOptionsNew(context, true, (String image, File file) async {
if (checkFileSize(file.path)) {
selectedFile = file;
isFileSelected = true;
isAttachmentMsg = true;
isTextMsg = false;
sFileType = getFileExtension(file.path)!;
message.text = file.path.split("/").last;
Navigator.of(context).pop();
@ -715,7 +820,7 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
}
void removeAttachment() {
isFileSelected = false;
isAttachmentMsg = false;
sFileType = "";
message.text = '';
notifyListeners();
@ -784,14 +889,14 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
void chatReply(SingleUserChatModel data) {
repliedMsg = [];
data.isReplied = true;
isMsgReply = true;
isReplyMsg = true;
repliedMsg.add(data);
notifyListeners();
}
void closeMe() {
repliedMsg = [];
isMsgReply = false;
isReplyMsg = false;
notifyListeners();
}
@ -841,10 +946,12 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
receiverID = 0;
paginationVal = 0;
message.text = '';
isFileSelected = false;
isAttachmentMsg = false;
repliedMsg = [];
sFileType = "";
isMsgReply = false;
isReplyMsg = false;
isTextMsg = false;
isVoiceMsg = false;
notifyListeners();
}
@ -855,7 +962,10 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
receiverID = 0;
paginationVal = 0;
message.text = '';
isFileSelected = false;
isTextMsg = false;
isAttachmentMsg = false;
isVoiceMsg = false;
isReplyMsg = false;
repliedMsg = [];
sFileType = "";
}
@ -866,7 +976,10 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
receiverID = 0;
paginationVal = 0;
message.text = '';
isFileSelected = false;
isTextMsg = false;
isAttachmentMsg = false;
isVoiceMsg = false;
isReplyMsg = false;
repliedMsg = [];
sFileType = "";
deleteData();
@ -1052,6 +1165,7 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
isRecoding = false;
isPlaying = false;
isPause = false;
isVoiceMsg = false;
recorderController.dispose();
playerController.dispose();
}
@ -1061,6 +1175,7 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
if (status.isDenied == true) {
startRecoding();
} else {
isVoiceMsg = true;
recorderController.reset();
await recorderController.record(path);
_recodeDuration = 0;
@ -1123,6 +1238,7 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
isPause = false;
isRecoding = false;
isPlaying = false;
isVoiceMsg = false;
notifyListeners();
}
}
@ -1141,169 +1257,49 @@ class ChatProviderModel with ChangeNotifier, DiagnosticableTreeMixin {
return numberStr;
}
void playRecoding() async {
isPlaying = true;
await playerController.startPlayer(finishMode: FinishMode.pause);
}
void playOrPause() async {
playerController.playerState == PlayerState.playing ? await playerController.pausePlayer() : playRecoding();
notifyListeners();
}
void sendVoiceMessage(BuildContext context, {required int targetUserId, required int userStatus, required String userEmail, required String targetUserName}) async {
if (!isPause) {
path = await recorderController.stop(false);
}
if (kDebugMode) {
logger.i("path:" + path!);
}
File voiceFile = File(path!);
voiceFile.readAsBytesSync();
_timer?.cancel();
isPause = false;
isPlaying = false;
isRecoding = false;
Utils.showLoading(context);
dynamic value = await uploadAttachments(AppState().chatDetails!.response!.id.toString(), voiceFile);
String? ext = getFileExtension(voiceFile.path);
Utils.hideLoading(context);
sendVoiceMessageToServer(
msgText: voiceFile.path!.split("/").last,
chatEventId: 2,
fileTypeId: getFileType(ext.toString()),
targetUserId: targetUserId,
targetUserName: targetUserName,
isVoiceAttached: true,
voice: voiceFile.readAsBytesSync(),
userEmail: userEmail,
userStatus: userStatus,
chatReplyId: null,
isAttachment: true,
isReply: isMsgReply,
voiceFile: voiceFile,
);
notifyListeners();
}
Future<void> sendVoiceMessageToServer(
{String? msgText,
int? chatEventId,
int? fileTypeId,
int? targetUserId,
String? targetUserName,
bool? isVoiceAttached,
Uint8List? voice,
String? userEmail,
int? userStatus,
bool? isReply,
bool? isAttachment,
int? chatReplyId,
File? voiceFile}) async {
Uuid uuid = const Uuid();
String contentNo = uuid.v4();
String msg = msgText!;
SingleUserChatModel data = SingleUserChatModel(
chatEventId: chatEventId,
chatSource: 1,
contant: msg,
contantNo: contentNo,
conversationId: chatCID,
createdDate: DateTime.now(),
currentUserId: AppState().chatDetails!.response!.id,
currentUserName: AppState().chatDetails!.response!.userName,
targetUserId: targetUserId,
targetUserName: targetUserName,
isReplied: false,
fileTypeId: fileTypeId,
userChatReplyResponse: isReply! ? UserChatReplyResponse.fromJson(repliedMsg.first.toJson()) : null,
fileTypeResponse: isAttachment!
? FileTypeResponse(
fileTypeId: fileTypeId,
fileTypeName: getFileExtension(voiceFile!.path).toString(),
fileKind: "file",
fileName: msgText,
fileTypeDescription: getFileTypeDescription(getFileExtension(voiceFile!.path).toString()),
)
: null,
image: null,
isImageLoaded: false,
voice: voice,
);
userChatHistory.insert(0, data);
notifyListeners();
String chatData =
'{"contant":"$msg","contantNo":"$contentNo","chatEventId":$chatEventId,"fileTypeId": $fileTypeId,"currentUserId":${AppState().chatDetails!.response!.id},"chatSource":1,"userChatHistoryLineRequestList":[{"isSeen":false,"isDelivered":false,"targetUserId":$targetUserId,"targetUserStatus":1}],"chatReplyId":$chatReplyId,"conversationId":"$chatCID"}';
await chatHubConnection.invoke("AddChatUserAsync", args: <Object>[json.decode(chatData)]);
if (searchedChats != null) {
dynamic contain = searchedChats!.where((ChatUser element) => element.id == targetUserId);
if (contain.isEmpty) {
List<String> emails = [];
emails.add(await EmailImageEncryption().encrypt(val: userEmail!));
List<ChatUserImageModel> chatImages = await ChatApiClient().getUsersImages(encryptedEmails: emails);
searchedChats!.add(
ChatUser(
id: targetUserId,
userName: targetUserName,
unreadMessageCount: 0,
email: userEmail,
isImageLoading: false,
image: chatImages.first.profilePicture ?? "",
isImageLoaded: true,
isTyping: false,
isFav: false,
userStatus: userStatus,
userLocalDownlaodedImage: await downloadImageLocal(chatImages.first.profilePicture, targetUserId.toString()),
),
);
notifyListeners();
}
} else {
List<String> emails = [];
emails.add(await EmailImageEncryption().encrypt(val: userEmail!));
List<ChatUserImageModel> chatImages = await ChatApiClient().getUsersImages(encryptedEmails: emails);
searchedChats!.add(
ChatUser(
id: targetUserId,
userName: targetUserName,
unreadMessageCount: 0,
email: userEmail,
isImageLoading: false,
image: chatImages.first.profilePicture ?? "",
isImageLoaded: true,
isTyping: false,
isFav: false,
userStatus: userStatus,
userLocalDownlaodedImage: await downloadImageLocal(chatImages.first.profilePicture, targetUserId.toString()),
),
);
notifyListeners();
}
}
// void playRecoding() async {
// isPlaying = true;
// await playerController.startPlayer(finishMode: FinishMode.pause);
//}
void playVoice(
BuildContext context, {
required SingleUserChatModel data,
}) async {
Utils.showLoading(context);
Uint8List encodedString = await ChatApiClient().downloadURL(fileName: data.contant!, fileTypeDescription: getFileTypeDescription(data.fileTypeResponse!.fileTypeName ?? ""));
try {
String path = await downChatVoice(encodedString, data.fileTypeResponse!.fileTypeName ?? "", data);
File file = File(path!);
file.readAsBytesSync();
Utils.hideLoading(context);
await data.voiceController!.preparePlayer(file.path, 1.0);
data.voiceController!.startPlayer(finishMode: FinishMode.stop);
notifyListeners();
} catch (e) {
Utils.showToast("Cannot open file.");
if (data.voice != null && data.voice!.existsSync()) {
print("Heree");
await data.voiceController!.setFilePath(data!.voice!.path);
await data.voiceController!.setLoopMode(LoopMode.off);
Duration? duration = await data.voiceController!.load();
await data.voiceController!.seek(duration);
await data.voiceController!.play();
} else {
Utils.showLoading(context);
Uint8List encodedString = await ChatApiClient().downloadURL(fileName: data.contant!, fileTypeDescription: getFileTypeDescription(data.fileTypeResponse!.fileTypeName ?? ""));
try {
String path = await downChatVoice(encodedString, data.fileTypeResponse!.fileTypeName ?? "", data);
File file = File(path!);
await file.readAsBytes();
data.voice = file;
Duration? duration = await data.voiceController!.setFilePath(file.path);
await data.voiceController!.setLoopMode(LoopMode.off);
await data.voiceController!.seek(duration);
await data.voiceController!.setVolume(1.0);
await data.voiceController!.load();
Utils.hideLoading(context);
await data.voiceController!.play();
} catch (e) {
Utils.showToast("Cannot open file.");
}
}
}
void stopPlaying(BuildContext context, {required SingleUserChatModel data}) async {
await data.voiceController!.stopPlayer();
notifyListeners();
void pausePlaying(BuildContext context, {required SingleUserChatModel data}) async {
await data.voiceController!.pause();
}
void resumePlaying(BuildContext context, {required SingleUserChatModel data}) async {
await data.voiceController!.play();
}
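// NOTE (editor sketch, not part of this commit): every voice message now holds
// its own AudioPlayer, so a hypothetical helper like this could release them
// when the chat history is cleared:
void disposeVoiceControllers() {
  for (SingleUserChatModel msg in userChatHistory) {
    msg.voiceController?.dispose(); // controller is null for non-voice messages, so `?.` skips them
  }
}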
Future<String> downChatVoice(Uint8List bytes, String ext, SingleUserChatModel data) async {

@ -1,23 +1,18 @@
import 'dart:convert';
import 'dart:typed_data';
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:audio_waveforms/audio_waveforms.dart' as awf;
import 'package:just_audio/just_audio.dart';
import 'package:flutter/material.dart';
import 'package:flutter_svg/flutter_svg.dart';
import 'package:mohem_flutter_app/api/api_client.dart';
import 'package:mohem_flutter_app/api/chat/chat_api_client.dart';
import 'package:mohem_flutter_app/app_state/app_state.dart';
import 'package:mohem_flutter_app/classes/colors.dart';
import 'package:mohem_flutter_app/classes/utils.dart';
import 'package:mohem_flutter_app/extensions/int_extensions.dart';
import 'package:mohem_flutter_app/extensions/string_extensions.dart';
import 'package:mohem_flutter_app/extensions/widget_extensions.dart';
import 'package:mohem_flutter_app/main.dart';
import 'package:mohem_flutter_app/models/chat/get_single_user_chat_list_model.dart';
import 'package:mohem_flutter_app/provider/chat_provider_model.dart';
import 'package:mohem_flutter_app/ui/chat/chat_full_image_preview.dart';
import 'package:mohem_flutter_app/widgets/bottom_sheet.dart';
import 'package:open_file/open_file.dart';
import 'package:provider/provider.dart';
class ChatBubble extends StatelessWidget {
@ -29,7 +24,7 @@ class ChatBubble extends StatelessWidget {
bool isReplied = false;
int? fileTypeID;
String? fileTypeName;
late ChatProviderModel data;
late ChatProviderModel provider;
String? fileTypeDescription;
bool isDelivered = false;
@ -52,7 +47,7 @@ class ChatBubble extends StatelessWidget {
Size windowSize = MediaQuery.of(context).size;
screenOffset = Offset(windowSize.width / 2, windowSize.height / 2);
makeAssign();
data = Provider.of<ChatProviderModel>(context, listen: false);
provider = Provider.of<ChatProviderModel>(context, listen: false);
return isCurrentUser ? currentUser(context) : receiptUser(context);
}
@ -101,7 +96,7 @@ class ChatBubble extends StatelessWidget {
),
),
).paddingOnly(bottom: 7).onPress(() {
data.scrollToMsg(cItem);
provider.scrollToMsg(cItem);
}),
if (fileTypeID == 12 || fileTypeID == 4 || fileTypeID == 3)
ClipRRect(
@ -119,20 +114,14 @@ class ChatBubble extends StatelessWidget {
),
).paddingOnly(bottom: 4),
if (fileTypeID == 13)
currentWaveBubble(context, cItem).onPress(() {
if (cItem.voiceController!.playerState == PlayerState.playing) {
data.stopPlaying(context, data: cItem);
} else {
data.playVoice(context, data: cItem);
}
})
currentWaveBubble(context, cItem)
else
Row(
children: [
if (fileTypeID == 1 || fileTypeID == 5 || fileTypeID == 7 || fileTypeID == 6 || fileTypeID == 8
// || fileTypeID == 2
)
SvgPicture.asset(data.getType(fileTypeName ?? ""), height: 30, width: 22, alignment: Alignment.center, fit: BoxFit.cover).paddingOnly(left: 0, right: 10),
SvgPicture.asset(provider.getType(fileTypeName ?? ""), height: 30, width: 22, alignment: Alignment.center, fit: BoxFit.cover).paddingOnly(left: 0, right: 10),
(cItem.contant ?? "").toText12().expanded,
if (fileTypeID == 1 || fileTypeID == 5 || fileTypeID == 7 || fileTypeID == 6 || fileTypeID == 8
//|| fileTypeID == 2
@ -210,7 +199,7 @@ class ChatBubble extends StatelessWidget {
),
),
).paddingOnly(bottom: 7).onPress(() {
data.scrollToMsg(cItem);
provider.scrollToMsg(cItem);
}),
if (fileTypeID == 12 || fileTypeID == 4 || fileTypeID == 3)
ClipRRect(
@ -228,20 +217,14 @@ class ChatBubble extends StatelessWidget {
),
).paddingOnly(bottom: 4),
if (fileTypeID == 13)
recipetWaveBubble(context, cItem).onPress(() {
if (cItem.voiceController!.playerState == PlayerState.playing) {
data.stopPlaying(context, data: cItem);
} else {
data.playVoice(context, data: cItem);
}
})
recipetWaveBubble(context, cItem)
else
Row(
children: [
if (fileTypeID == 1 || fileTypeID == 5 || fileTypeID == 7 || fileTypeID == 6 || fileTypeID == 8
// || fileTypeID == 2
)
SvgPicture.asset(data.getType(fileTypeName ?? ""), height: 30, width: 22, alignment: Alignment.center, fit: BoxFit.cover).paddingOnly(left: 0, right: 10),
SvgPicture.asset(provider.getType(fileTypeName ?? ""), height: 30, width: 22, alignment: Alignment.center, fit: BoxFit.cover).paddingOnly(left: 0, right: 10),
(cItem.contant ?? "").toText12(color: Colors.white).expanded,
if (fileTypeID == 1 || fileTypeID == 5 || fileTypeID == 7 || fileTypeID == 6 || fileTypeID == 8
//|| fileTypeID == 2
@ -261,8 +244,6 @@ class ChatBubble extends StatelessWidget {
}
Widget showImage({required bool isReplyPreview, required String fileName, required String fileTypeDescription}) {
if (isReplyPreview) {}
if (cItem.isImageLoaded! && cItem.image != null) {
return Image.memory(
cItem.image!,
@ -301,7 +282,6 @@ class ChatBubble extends StatelessWidget {
}
Widget currentWaveBubble(BuildContext context, SingleUserChatModel data) {
PlayerController cunController = PlayerController();
return Container(
margin: const EdgeInsets.all(0),
decoration: BoxDecoration(
@ -309,40 +289,18 @@ class ChatBubble extends StatelessWidget {
left: BorderSide(width: 6, color: isCurrentUser ? MyColors.gradiantStartColor : MyColors.white),
),
color: isCurrentUser ? MyColors.black.withOpacity(0.10) : MyColors.black.withOpacity(0.30),
// gradient: const LinearGradient(
// transform: GradientRotation(.83),
// begin: Alignment.topRight,
// end: Alignment.bottomLeft,
// colors: <Color>[
// MyColors.gradiantEndColor,
// MyColors.gradiantStartColor,
// ],
// ),
),
child: Row(
mainAxisSize: MainAxisSize.max,
children: [
Icon(
data.voiceController!.playerState == PlayerState.playing ? Icons.stop_circle : Icons.play_arrow,
color: MyColors.lightGreenColor,
).paddingAll(10),
AudioFileWaveforms(
size: Size(MediaQuery.of(context).size.width * 0.3, 10),
playerController: data.voiceController!,
padding: EdgeInsets.zero,
margin: EdgeInsets.zero,
enableSeekGesture: true,
density: 1,
playerWaveStyle: const PlayerWaveStyle(
fixedWaveColor: Colors.white,
liveWaveColor: MyColors.greenColor,
showTop: true,
showBottom: true,
waveCap: StrokeCap.round,
seekLineThickness: 2,
visualizerHeight: 4,
backgroundColor: Colors.transparent,
),
getPlayer(player: data.voiceController!, modelData: data),
Slider(
activeColor: Colors.white,
inactiveColor: Colors.grey,
value: 0.toDouble(),
max: 50.toDouble(),
onChanged: (double value) {
// Add code to track the music duration.
},
).expanded,
],
),
@ -357,49 +315,71 @@ class ChatBubble extends StatelessWidget {
left: BorderSide(width: 6, color: isCurrentUser ? MyColors.gradiantStartColor : MyColors.white),
),
color: isCurrentUser ? MyColors.black.withOpacity(0.10) : MyColors.black.withOpacity(0.30),
// gradient: const LinearGradient(
// transform: GradientRotation(.83),
// begin: Alignment.topRight,
// end: Alignment.bottomLeft,
// colors: <Color>[
// MyColors.gradiantEndColor,
// MyColors.gradiantStartColor,
// ],
// ),
),
child: Row(
mainAxisSize: MainAxisSize.max,
children: [
Icon(
data.voiceController!.playerState == PlayerState.playing ? Icons.stop_circle : Icons.play_arrow,
color: MyColors.white,
).paddingAll(10),
AudioFileWaveforms(
size: Size(MediaQuery.of(context).size.width * 0.3, 10),
playerController: data.voiceController!,
padding: EdgeInsets.zero,
margin: EdgeInsets.zero,
enableSeekGesture: true,
density: 1,
playerWaveStyle: const PlayerWaveStyle(
fixedWaveColor: Colors.white,
liveWaveColor: MyColors.greenColor,
showTop: true,
showBottom: true,
waveCap: StrokeCap.round,
seekLineThickness: 2,
visualizerHeight: 4,
backgroundColor: Colors.transparent,
),
getPlayer(player: data.voiceController!, modelData: data),
Slider(
activeColor: Colors.white,
inactiveColor: Colors.grey,
value: 0.toDouble(),
max: 50.toDouble(),
onChanged: (double value) {
// Add code to track the music duration.
},
).expanded,
],
),
).circle(5);
}
Widget getPlayer({required AudioPlayer player, required SingleUserChatModel modelData}) {
return StreamBuilder<PlayerState>(
stream: player!.playerStateStream,
builder: (BuildContext context, AsyncSnapshot<PlayerState> snapshot) {
PlayerState? playerState = snapshot.data;
ProcessingState? processingState = playerState?.processingState;
bool? playing = playerState?.playing;
if (processingState == ProcessingState.loading || processingState == ProcessingState.buffering) {
return Container(
margin: const EdgeInsets.all(8.0),
width: 30.0,
height: 30.0,
child: const CircularProgressIndicator(),
);
} else if (playing != true) {
return Icon(
Icons.play_arrow,
size: 30,
color: MyColors.lightGreenColor,
).onPress(() {
provider.playVoice(context, data: modelData);
});
} else if (processingState != ProcessingState.completed) {
return Icon(
Icons.pause,
size: 30,
color: MyColors.lightGreenColor,
).onPress(() {
provider.pausePlaying(context, data: modelData);
});
} else {
return Icon(
Icons.replay,
size: 30,
color: MyColors.lightGreenColor,
).onPress(() {
player!.seek(Duration.zero);
});
}
},
);
}
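// NOTE (editor sketch, not part of this commit): the Slider above is still a
// placeholder ("Add code to track the music duration"). One way to drive it
// with just_audio, using a hypothetical helper on the same player instance:
Widget getSeekBar({required AudioPlayer player}) {
  return StreamBuilder<Duration>(
    stream: player.positionStream,
    builder: (BuildContext context, AsyncSnapshot<Duration> snapshot) {
      Duration position = snapshot.data ?? Duration.zero;
      Duration total = player.duration ?? Duration.zero;
      return Slider(
        activeColor: Colors.white,
        inactiveColor: Colors.grey,
        max: total.inMilliseconds.toDouble(),
        value: position.inMilliseconds.clamp(0, total.inMilliseconds).toDouble(),
        onChanged: (double value) {
          player.seek(Duration(milliseconds: value.round()));
        },
      );
    },
  );
}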
}
class WaveBubble extends StatelessWidget {
final PlayerController playerController;
final awf.PlayerController playerController;
final VoidCallback onTap;
final bool isPlaying;
@ -436,14 +416,14 @@ class WaveBubble extends StatelessWidget {
splashColor: Colors.transparent,
highlightColor: Colors.transparent,
),
AudioFileWaveforms(
awf.AudioFileWaveforms(
size: Size(MediaQuery.of(context).size.width / 2, 10),
playerController: playerController,
padding: EdgeInsets.zero,
margin: EdgeInsets.zero,
enableSeekGesture: true,
density: 1,
playerWaveStyle: const PlayerWaveStyle(
playerWaveStyle: const awf.PlayerWaveStyle(
fixedWaveColor: Colors.white,
liveWaveColor: MyColors.greenColor,
showTop: true,

@ -144,7 +144,7 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
);
},
).onPress(() async {
logger.d(m.userChatHistory[i].toJson());
logger.w(m.userChatHistory[i].toJson());
if (m.userChatHistory[i].fileTypeResponse != null && m.userChatHistory[i].fileTypeId != null) {
if (m.userChatHistory[i].fileTypeId! == 1 ||
m.userChatHistory[i].fileTypeId! == 5 ||
@ -161,7 +161,7 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
},
),
).expanded,
if (m.isMsgReply)
if (m.isReplyMsg)
SizedBox(
height: 82,
child: Row(
@ -183,7 +183,7 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
],
).expanded,
12.width,
if (m.isMsgReply && m.repliedMsg.isNotEmpty) showReplyImage(m.repliedMsg, m),
if (m.isReplyMsg && m.repliedMsg.isNotEmpty) showReplyImage(m.repliedMsg, m),
12.width,
const Icon(Icons.cancel, size: 23, color: MyColors.grey7BColor).onPress(m.closeMe),
],
@ -192,12 +192,9 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
],
),
),
if (m.isFileSelected && m.sFileType == ".png" || m.sFileType == ".jpeg" || m.sFileType == ".jpg")
if (m.isAttachmentMsg && m.sFileType == ".png" || m.sFileType == ".jpeg" || m.sFileType == ".jpg")
SizedBox(height: 200, width: double.infinity, child: Image.file(m.selectedFile, fit: BoxFit.cover)).paddingOnly(left: 21, right: 21, top: 21),
const Divider(
height: 1,
color: MyColors.lightGreyEFColor,
),
const Divider(height: 1, color: MyColors.lightGreyEFColor),
if (m.isRecoding)
Column(
children: <Widget>[
@ -206,12 +203,11 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
Text(m.buildTimer()).paddingAll(10),
if (m.isRecoding && m.isPlaying)
WaveBubble(
playerController: m.playerController,
onTap: () {
m.playOrPause();
},
isPlaying: m.playerController.playerState == PlayerState.playing)
.expanded
playerController: m.playerController,
isPlaying: m.playerController.playerState == PlayerState.playing,
onTap: () {
},
).expanded
else
AudioWaveforms(
waveStyle: const WaveStyle(
@ -243,25 +239,9 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
).paddingAll(10).onPress(() {
m.deleteRecoding();
}),
// if (m.isPause)
// const Icon(
// Icons.mic,
// size: 26,
// color: MyColors.lightGreenColor,
// ).paddingOnly(right: 15).onPress(() {
// m.resumeRecoding();
// }),
// if (!m.isPause)
// const Icon(
// Icons.pause_circle_outline,
// size: 26,
// color: MyColors.lightGreenColor,
// ).paddingOnly(right: 15).onPress(() {
// m.pauseRecoding();
// }),
SvgPicture.asset("assets/icons/chat/chat_send_icon.svg", height: 26, width: 26)
.onPress(
() => m.sendVoiceMessage(context,
() => m.sendChatMessage(context,
targetUserId: params!.chatUser!.id!,
userStatus: params!.chatUser!.userStatus ?? 0,
userEmail: params!.chatUser!.email!,
@ -278,8 +258,8 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
TextField(
controller: m.message,
decoration: InputDecoration(
hintText: m.isFileSelected ? m.selectedFile.path.split("/").last : LocaleKeys.typeheretoreply.tr(),
hintStyle: TextStyle(color: m.isFileSelected ? MyColors.darkTextColor : MyColors.grey98Color, fontSize: 14),
hintText: m.isAttachmentMsg ? m.selectedFile.path.split("/").last : LocaleKeys.typeheretoreply.tr(),
hintStyle: TextStyle(color: m.isAttachmentMsg ? MyColors.darkTextColor : MyColors.grey98Color, fontSize: 14),
border: InputBorder.none,
focusedBorder: InputBorder.none,
enabledBorder: InputBorder.none,
@ -297,7 +277,13 @@ class _ChatDetailScreenState extends State<ChatDetailScreen> {
? SvgPicture.asset(m.getType(m.sFileType), height: 30, width: 22, alignment: Alignment.center, fit: BoxFit.cover).paddingOnly(left: 21, right: 15)
: null,
),
onChanged: (val) {
onChanged: (String val) {
print(val.length);
if (val.isNotEmpty) {
m.isTextMsg = true;
} else {
m.isTextMsg = false;
}
m.userTypingInvoke(currentUser: AppState().chatDetails!.response!.id!, reciptUser: params!.chatUser!.id!);
},
).expanded,
