mohemm-flutter-app/lib/provider/chat_call_provider.dart

import 'dart:convert';
import 'dart:io';
import 'package:flutter/cupertino.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:just_audio/just_audio.dart';
import 'package:mohem_flutter_app/app_state/app_state.dart';
import 'package:mohem_flutter_app/main.dart';
import 'package:mohem_flutter_app/models/chat/call.dart';
import 'package:mohem_flutter_app/models/chat/webrtc_payloads.dart';
import 'package:mohem_flutter_app/provider/chat_provider_model.dart';
import 'package:mohem_flutter_app/ui/chat/call/chat_incoming_call_screen.dart';
import 'package:mohem_flutter_app/ui/chat/call/start_call_screen.dart';
import 'package:mohem_flutter_app/ui/landing/dashboard_screen.dart';
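/// Provider that manages one-to-one WebRTC audio/video calls signalled over
/// the chat hub connection: offer/answer exchange, ICE candidates, call state,
/// the local/remote video renderers, and the in-call controls.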
class ChatCallProvider with ChangeNotifier, DiagnosticableTreeMixin {
  ///////////////////// Web RTC Video Calling //////////////////////
  // Video Call
  late RTCPeerConnection _pc;
  late ChatProviderModel chatProvModel;
  RTCVideoRenderer localVideoRenderer = RTCVideoRenderer();
  RTCVideoRenderer remoteRenderer = RTCVideoRenderer();
  final AudioPlayer player = AudioPlayer();
  late MediaStream localStream;
  late CallDataModel outGoingCallData;
  bool isMicOff = false;
  bool isLoudSpeaker = false;
  bool isCamOff = false;
  bool isCallEnded = false;
  bool isVideoCall = false;
  bool isCallStarted = false;
  bool isFrontCamera = true;
  /// WebRTC Connection Variables
  bool _offer = false;
  late BuildContext providerContext;
  void initCallListeners({required BuildContext context}) {
    providerContext = context;
    if (kDebugMode) {
      print("=================== Call Listeners Registered =======================");
    }
    chatHubConnection.on("OnCallAcceptedAsync", onCallAcceptedAsync);
    chatHubConnection.on("OnIceCandidateAsync", onIceCandidateAsync);
    chatHubConnection.on("OnOfferAsync", onOfferAsync);
    chatHubConnection.on("OnAnswerOffer", onAnswerOffer);
    chatHubConnection.on("OnHangUpAsync", onHangUpAsync);
    chatHubConnection.on("OnCallDeclinedAsync", onCallDeclinedAsync);
    chatHubConnection.on("OnIncomingCallAsync", onIncomingCallAsync);
  }
  // Video Constraints
  Map<String, Object> videoConstraints = {
    "video": {
      "mandatory": {
        "width": {"min": 1280},
        "height": {"min": 720}
      },
      "optional": [
        {
          "width": {"max": 1280}
        },
        {"frameRate": 25},
        {"facingMode": "user"}
      ]
    },
    "frameRate": 25,
    "width": 1280,
    "height": 720,
    "audio": true,
  };
  // Audio Constraints
  Map<String, Object> audioConstraints = {
    "sampleRate": 8000,
    "sampleSize": 16,
    "channelCount": 2,
    "echoCancellation": true,
    "audio": true,
  };
  Future<void> init() async {
    await createOfferWithConnection();
  }
  Future<void> initLocalCamera({required ChatProviderModel chatProvmodel, required CallDataModel callData, required BuildContext context, bool isIncomingCall = false}) async {
    isCallEnded = false;
    chatProvModel = chatProvmodel;
    outGoingCallData = callData;
    await localVideoRenderer.initialize();
    localStream = await navigator.mediaDevices.getUserMedia(isVideoCall ? videoConstraints : audioConstraints);
    localVideoRenderer.srcObject = localStream;
    await remoteRenderer.initialize();
    // playRingtone();
    await startCall(callType: isVideoCall ? "Video" : "Audio", context: context);
    _pc = await createOfferWithConnection();
    notifyListeners();
  }
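  /// Sends a "call started" chat message to the receiver and invokes
  /// CallUserAsync / UpdateUserStatusAsync on the hub to ring the target user.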
  Future<void> startCall({required String callType, required BuildContext context}) async {
    chatProvModel.isTextMsg = true;
    chatProvModel.isAttachmentMsg = false;
    chatProvModel.isVoiceMsg = false;
    chatProvModel.isReplyMsg = false;
    chatProvModel.isCall = true;
    chatProvModel.message.text = "Start $callType call ${outGoingCallData.receiverName.toString().replaceAll(".", " ")}";
    chatProvModel.sendChatMessage(
      context,
      targetUserId: outGoingCallData.receiverId!,
      userStatus: 1,
      userEmail: outGoingCallData.receiverEmail!,
      targetUserName: outGoingCallData.receiverName!,
    );
    await invoke(
      invokeMethod: "CallUserAsync",
      currentUserID: outGoingCallData.callerId!,
      targetUserID: outGoingCallData.receiverId!,
    );
    await invoke(invokeMethod: "UpdateUserStatusAsync", currentUserID: outGoingCallData.callerId!, targetUserID: outGoingCallData.receiverId!, userStatus: 4);
  }
  Future<bool> endCall() async {
    await invoke(invokeMethod: "UpdateUserStatusAsync", currentUserID: outGoingCallData.callerId!, targetUserID: outGoingCallData.receiverId!, userStatus: 1);
    await invoke(invokeMethod: "HangUpAsync", currentUserID: outGoingCallData.callerId!, targetUserID: outGoingCallData.receiverId!, userStatus: 1);
    await _pc.dispose();
    isCallStarted = false;
    isVideoCall = false;
    isCamOff = false;
    isMicOff = false;
    isLoudSpeaker = false;
    localVideoRenderer.srcObject = null;
    remoteRenderer.srcObject = null;
    // player.stop();
    _offer = false;
    return true;
  }
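  /// Initializes the local/remote renderers and captures local media for an
  /// incoming call before it is answered.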
  Future<void> startIncomingCall() async {
    await localVideoRenderer.initialize();
    localStream = await navigator.mediaDevices.getUserMedia(isVideoCall ? videoConstraints : audioConstraints);
    localVideoRenderer.srcObject = localStream;
    await remoteRenderer.initialize();
  }
  // Outgoing Listeners
  Future<void> onCallAcceptedAsync(List<Object?>? params) async {
    print("--------------------- On Call Accept ---------------------------------------");
    dynamic items = params!.toList();
    RTCSessionDescription description = await _createOffer();
    await _pc.setLocalDescription(description);
    var payload = {"target": items[0]["id"], "caller": outGoingCallData.callerId, "sdp": description.toMap()};
    await invoke(invokeMethod: "OfferAsync", currentUserID: outGoingCallData.callerId!, targetUserID: items[0]["id"], data: jsonEncode(payload));
  }
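  /// Adds the remote ICE candidate to the peer connection and, on the first
  /// candidate, navigates to the in-call screen.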
  Future<void> onIceCandidateAsync(List<Object?>? params) async {
    print("--------------------- onIceCandidateAsync ---------------------------------------");
    var items = params!.toList();
    if (kDebugMode) {
      logger.i("res: $items");
    }
    RemoteIceCandidatePayLoad data = RemoteIceCandidatePayLoad.fromJson(jsonDecode(items.first.toString()));
    await _pc.addCandidate(RTCIceCandidate(data.candidate!.candidate, data.candidate!.sdpMid, data.candidate!.sdpMLineIndex));
    if (!isCallStarted) {
      isCallStarted = true;
      Navigator.push(
        providerContext,
        MaterialPageRoute(
          builder: (BuildContext context) => StartCallPage(localRenderer: localVideoRenderer, remoteRenderer: remoteRenderer),
          allowSnapshotting: false,
        ),
      ).then((value) {
        Navigator.of(providerContext).pop();
      });
    }
    notifyListeners();
  }
  void onOfferAsync(List<Object?>? params) {
    print("--------------------- onOfferAsync ---------------------------------------");
  }
  // Incoming Listeners
  Future<void> onAnswerOffer(List<Object?>? payload) async {
    print("--------------------- On Answer Offer Async ---------------------------------------");
    var items = payload!.toList();
    if (kDebugMode) {
      logger.i("res: $items");
    }
    CallSessionPayLoad data = CallSessionPayLoad.fromJson(jsonDecode(items.first.toString()));
    RTCSessionDescription description = RTCSessionDescription(data.sdp!.sdp, 'answer');
    await _pc.setRemoteDescription(description);
  }
  void onHangUpAsync(List<Object?>? params) {
    print("--------------------- onHangUp ---------------------------------------");
    endCall().then((bool value) {
      isCallEnded = true;
      notifyListeners();
    });
  }
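  /// Builds a CallDataModel from the hub payload (caller details plus the
  /// current user as receiver) and opens the incoming-call screen.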
  Future<void> onIncomingCallAsync(List<Object?>? params) async {
    print("--------------------- On Incoming Call ---------------------------------------");
    dynamic items = params!.toList();
    logger.d(items);
    Map<String, dynamic> json = {
      "callerID": items[0]["id"],
      "callerName": items[0]["userName"],
      "callerEmail": items[0]["email"],
      "callerTitle": items[0]["title"],
      "callerPhone": null,
      "receiverID": AppState().chatDetails!.response!.id,
      "receiverName": AppState().chatDetails!.response!.userName,
      "receiverEmail": AppState().chatDetails!.response!.email,
      "receiverTitle": AppState().chatDetails!.response!.title,
      "receiverPhone": AppState().chatDetails!.response!.phone,
      "title": AppState().chatDetails!.response!.userName!.replaceAll(".", " "),
      "callType": items[1] == true ? "Video" : "Audio",
    };
    CallDataModel callData = CallDataModel.fromJson(json);
    await Navigator.push(
      providerContext,
      MaterialPageRoute(
        builder: (BuildContext context) => IncomingCall(
          isVideoCall: items[1] == true,
          outGoingCallData: callData,
        ),
      ),
    );
  }
  void onCallDeclinedAsync(List<Object?>? params) {
    print("--------------------- on Call Declined ---------------------------------------");
    endCall().then((bool value) {
      if (value) {
        isCallEnded = true;
        notifyListeners();
      }
    });
  }
  //// Invoke Methods
  Future<void> invoke({required String invokeMethod, required int currentUserID, required int targetUserID, dynamic data, int userStatus = 1}) async {
    List<Object> args = [];
    // logger.w(currentUserID.toString() + " -- " + targetUserID.toString() + " -- " + isVideoCall.toString());
    if (invokeMethod == "CallUserAsync") {
      args = [currentUserID, targetUserID, isVideoCall];
    } else if (invokeMethod == "answerCallAsync") {
      args = [currentUserID, targetUserID];
    } else if (invokeMethod == "IceCandidateAsync") {
      args = [targetUserID, data];
    } else if (invokeMethod == "OfferAsync") {
      args = [targetUserID, data];
    } else if (invokeMethod == "AnswerOfferAsync") {
      args = [targetUserID, data]; // json in data
    } else if (invokeMethod == "UpdateUserStatusAsync") {
      args = [currentUserID, userStatus];
    } else if (invokeMethod == "HangUpAsync") {
      args = [currentUserID, targetUserID];
    }
    logger.d(args);
    try {
      await chatHubConnection.invoke(invokeMethod, args: args);
    } catch (e) {
      logger.w(e);
    }
  }
  void stopListeners() {
    chatHubConnection.off('OnCallDeclinedAsync');
    chatHubConnection.off('OnCallAcceptedAsync');
    chatHubConnection.off('OnIceCandidateAsync');
    chatHubConnection.off('OnAnswerOffer');
  }
  Future<void> disposeRenders() async {
    await localVideoRenderer.dispose();
    await localStream.dispose();
    notifyListeners();
  }
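  /// Creates an RTCPeerConnection against the configured STUN/TURN servers,
  /// attaches the local stream, and wires up the ICE candidate / state callbacks.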
  Future<RTCPeerConnection> createOfferWithConnection() async {
    Map<String, dynamic> configuration = {
      "sdpSemantics": "plan-b",
      'iceServers': [
        {
          'urls': 'stun:15.185.116.59:3478',
        },
        {
          'urls': 'turn:15.185.116.59:3479',
          'username': 'admin',
          'credential': 'admin',
        },
      ]
    };
    Map<String, dynamic> offerSdpConstraints = {
      'mandatory': {
        'OfferToReceiveAudio': true,
        'OfferToReceiveVideo': true,
      },
      'optional': []
    };
    RTCPeerConnection pc = await createPeerConnection(configuration, offerSdpConstraints);
    await pc.addStream(localStream);
    pc.onConnectionState = (RTCPeerConnectionState state) {};
    pc.onAddStream = (MediaStream stream) {
      remoteRenderer.srcObject = stream;
      notifyListeners();
    };
    pc.onIceCandidate = (RTCIceCandidate e) async {
      if (e.candidate != null) {
        var payload = {"target": outGoingCallData.callerId, "candidate": e.toMap()};
        logger.i("Candidate: ${e.toMap()}");
        await invoke(invokeMethod: "IceCandidateAsync", currentUserID: outGoingCallData.callerId!, targetUserID: outGoingCallData.receiverId!, data: jsonEncode(payload));
      }
    };
    // pc.onTrack = (RTCTrackEvent event) async {
    //   String streamId = const Uuid().toString();
    //   MediaStream remoteStream = await createLocalMediaStream(streamId);
    //   event.streams[0].getTracks().forEach((MediaStreamTrack element) {
    //     logger.i("Stream Track: " + element.id.toString());
    //     // remoteRenderer.srcObject = element;
    //     remoteStream.addTrack(element);
    //   });
    // };
    pc.onSignalingState = (RTCSignalingState state) {
      logger.i("signaling state: ${state.name}");
    };
    pc.onIceGatheringState = (RTCIceGatheringState state) {
      logger.i("rtc ice gathering state: ${state.name}");
    };
    pc.onIceConnectionState = (RTCIceConnectionState state) {
      logger.i("rtc ice connection state: ${state.name}");
    };
    pc.onRenegotiationNeeded = () {};
    return pc;
  }
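  /// Plays the bundled ringtone asset at full volume (60-second mp3 on
  /// Android, 30-second caf elsewhere).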
  Future<void> playRingtone() async {
    await player.stop();
    await player.setVolume(1.0);
    String audioAsset = "";
    if (Platform.isAndroid) {
      audioAsset = "assets/audio/ring_60Sec.mp3";
    } else {
      audioAsset = "assets/audio/ring_30Sec.caf";
    }
    try {
      await player.setAsset(audioAsset);
      await player.load();
      player.play();
    } catch (e) {
      print("Error: $e");
    }
  }
  Future<RTCSessionDescription> _createOffer() async {
    RTCSessionDescription description = await _pc.createOffer();
    _offer = true;
    return description;
  }
  // Future<RTCSessionDescription> _createAnswer() async {
  //   RTCSessionDescription description = await _pc!.createAnswer();
  //   var session = description.sdp.toString();
  //   return description;
  //   _pc!.setLocalDescription(description);
  // }
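  // In-call controls: mic, camera, loudspeaker, and camera switching.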
  void micOff() {
    isMicOff = !isMicOff;
    localStream.getAudioTracks().forEach((track) {
      track.enabled = !track.enabled;
    });
    notifyListeners();
  }
  void camOff() {
    isCamOff = !isCamOff;
    localStream.getVideoTracks().forEach((track) {
      track.enabled = !track.enabled;
    });
    isVideoCall = !isCamOff;
    notifyListeners();
  }
  void loudOn() {
    isLoudSpeaker = !isLoudSpeaker;
    remoteRenderer.srcObject?.getAudioTracks().forEach((track) {
      track.enableSpeakerphone(isLoudSpeaker);
    });
    notifyListeners();
  }
  void switchCamera() {
    isFrontCamera = !isFrontCamera;
    print("================= Camera Switch Triggered ===================");
    Helper.switchCamera(localStream.getVideoTracks()[0]);
    notifyListeners();
  }
}