From 09e24fd33e29d8bf4ab668a05a60ddfaa6a8d47c Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Wed, 10 Nov 2021 21:40:36 +0800 Subject: [PATCH 01/15] feat: Migrate voip code from famedly-app. --- lib/src/voip.dart | 1556 +++++++++++++++++++++++++++++++++++++++++++++ pubspec.yaml | 2 + 2 files changed, 1558 insertions(+) create mode 100644 lib/src/voip.dart diff --git a/lib/src/voip.dart b/lib/src/voip.dart new file mode 100644 index 00000000..25dafbff --- /dev/null +++ b/lib/src/voip.dart @@ -0,0 +1,1556 @@ +import 'dart:async'; +import 'dart:core'; +import 'dart:io'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/widgets.dart'; + +import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; + +import '../matrix.dart'; + +bool get kIsMobile => !kIsWeb && (Platform.isAndroid || Platform.isIOS); + +/// The default life time for call events, in millisecond. +const lifetimeMs = 10 * 1000; + +/// The length of time a call can be ringing for. +const callTimeoutSec = 60; + +/// Wrapped MediaStream, used to adapt Widget to display +class WrappedMediaStream { + MediaStream? stream; + final String userId; + final Room room; + + /// Current stream type, usermedia or screen-sharing + String purpose; + bool audioMuted; + bool videoMuted; + final Client client; + + /// for debug + String get title => '$displayName:$purpose:a[$audioMuted]:v[$videoMuted]'; + RTCVideoRenderer? renderer; + bool stopped = false; + void Function(bool audioMuted, bool videoMuted)? onMuteStateChanged; + void Function(MediaStream stream)? onNewStream; + + WrappedMediaStream( + {this.stream, + required this.room, + required this.userId, + required this.purpose, + required this.client, + required this.audioMuted, + required this.videoMuted}); + + /// Initialize the video renderer + Future initialize() async { + if (renderer == null) { + renderer = RTCVideoRenderer(); + await renderer?.initialize(); + } + renderer?.srcObject = stream; + renderer?.onResize = () { + Logs().i( + 'onResize [${stream!.id.substring(0, 8)}] ${renderer?.videoWidth} x ${renderer?.videoHeight}'); + }; + } + + Future dispose() async { + if (renderer != null) { + renderer?.srcObject = null; + await renderer?.dispose(); + renderer = null; + } + + if (isLocal() && stream != null) { + await stream?.dispose(); + stream = null; + } + } + + String get avatarName => + getUser().calcDisplayname(mxidLocalPartFallback: false); + + String? 
get displayName => getUser().displayName;
+
+  User getUser() {
+    return room.getUserByMXIDSync(userId);
+  }
+
+  bool isLocal() {
+    return userId == client.userID;
+  }
+
+  bool isAudioMuted() {
+    return (stream != null && stream!.getAudioTracks().isEmpty) || audioMuted;
+  }
+
+  bool isVideoMuted() {
+    return (stream != null && stream!.getVideoTracks().isEmpty) || videoMuted;
+  }
+
+  void setNewStream(MediaStream newStream) {
+    stream = newStream;
+    renderer?.srcObject = stream;
+    if (onNewStream != null) {
+      onNewStream?.call(stream!);
+    }
+  }
+
+  void setAudioMuted(bool muted) {
+    audioMuted = muted;
+    if (onMuteStateChanged != null) {
+      onMuteStateChanged?.call(audioMuted, videoMuted);
+    }
+  }
+
+  void setVideoMuted(bool muted) {
+    videoMuted = muted;
+    if (onMuteStateChanged != null) {
+      onMuteStateChanged?.call(audioMuted, videoMuted);
+    }
+  }
+}
+
+// Call state
+enum CallState {
+  /// The call is initialized but not yet started
+  kFledgling,
+
+  /// The first invite has been sent and the local side has created an offer
+  kInviteSent,
+
+  /// getUserMedia or getDisplayMedia has been called,
+  /// but the MediaStream has not yet been returned
+  kWaitLocalMedia,
+
+  /// The local side has created an offer
+  kCreateOffer,
+
+  /// Received a remote offer message and created a local answer
+  kCreateAnswer,
+
+  /// The answer SDP is set, but ICE is not yet connected
+  kConnecting,
+
+  /// The WebRTC media stream is connected
+  kConnected,
+
+  /// The call was received, but no processing has been done yet.
+  kRinging,
+
+  /// End of call
+  kEnded,
+}
+
+class CallErrorCode {
+  /// The user chose to end the call
+  static String UserHangup = 'user_hangup';
+
+  /// An error code when the local client failed to create an offer.
+  static String LocalOfferFailed = 'local_offer_failed';
+
+  /// An error code when there is no local mic/camera to use. This may be because
+  /// the hardware isn't plugged in, or the user has explicitly denied access.
+ static String NoUserMedia = 'no_user_media'; + + /// Error code used when a call event failed to send + /// because unknown devices were present in the room + static String UnknownDevices = 'unknown_devices'; + + /// Error code used when we fail to send the invite + /// for some reason other than there being unknown devices + static String SendInvite = 'send_invite'; + + /// An answer could not be created + + static String CreateAnswer = 'create_answer'; + + /// Error code used when we fail to send the answer + /// for some reason other than there being unknown devices + + static String SendAnswer = 'send_answer'; + + /// The session description from the other side could not be set + static String SetRemoteDescription = 'set_remote_description'; + + /// The session description from this side could not be set + static String SetLocalDescription = 'set_local_description'; + + /// A different device answered the call + static String AnsweredElsewhere = 'answered_elsewhere'; + + /// No media connection could be established to the other party + static String IceFailed = 'ice_failed'; + + /// The invite timed out whilst waiting for an answer + static String InviteTimeout = 'invite_timeout'; + + /// The call was replaced by another call + static String Replaced = 'replaced'; + + /// Signalling for the call could not be sent (other than the initial invite) + static String SignallingFailed = 'signalling_timeout'; + + /// The remote party is busy + static String UserBusy = 'user_busy'; + + /// We transferred the call off to somewhere else + static String Transfered = 'transferred'; +} + +class CallError extends Error { + final String code; + final String msg; + final dynamic err; + CallError(this.code, this.msg, this.err); + + @override + String toString() { + return '[$code] $msg, err: ${err.toString()}'; + } +} + +enum CallEvent { + /// The call was hangup by the local|remote user. + kHangup, + + /// The call state has changed + kState, + + /// The call got some error. + kError, + + /// Call transfer + kReplaced, + + /// The value of isLocalOnHold() has changed + kLocalHoldUnhold, + + /// The value of isRemoteOnHold() has changed + kRemoteHoldUnhold, + + /// Feeds have changed + kFeedsChanged, + + /// For sip calls. support in the future. + kAssertedIdentityChanged, +} + +enum CallType { kVoice, kVideo } + +enum Direction { kIncoming, kOutgoing } + +enum CallParty { kLocal, kRemote } + +/// Initialization parameters of the call session. +class CallOptions { + late String callId; + late CallType type; + late Direction dir; + late String localPartyId; + late VoIP voip; + late Room room; + late List> iceServers; +} + +/// A call session object +class CallSession { + CallSession(this.opts); + CallOptions opts; + CallType get type => opts.type; + Room get room => opts.room; + VoIP get voip => opts.voip; + String get callId => opts.callId; + String get localPartyId => opts.localPartyId; + String? 
get displayName => room.displayname; + Direction get direction => opts.dir; + CallState state = CallState.kFledgling; + bool get isOutgoing => direction == Direction.kOutgoing; + bool get isRinging => state == CallState.kRinging; + late RTCPeerConnection pc; + List remoteCandidates = []; + List localCandidates = []; + late AssertedIdentity remoteAssertedIdentity; + bool get callHasEnded => state == CallState.kEnded; + bool iceGatheringFinished = false; + bool inviteOrAnswerSent = false; + bool localHold = false; + bool remoteOnHold = false; + bool _answeredByUs = false; + bool speakerOn = false; + bool makingOffer = false; + bool ignoreOffer = false; + String facingMode = 'user'; + late Client client; + late String remotePartyId; + late User remoteUser; + late CallParty hangupParty; + late String hangupReason; + + SDPStreamMetadata? remoteSDPStreamMetadata; + List usermediaSenders = []; + List screensharingSenders = []; + Map streams = {}; + List get getLocalStreams => + streams.values.where((element) => element.isLocal()).toList(); + List get getRemoteStreams => + streams.values.where((element) => !element.isLocal()).toList(); + WrappedMediaStream? get localUserMediaStream => getLocalStreams.firstWhere( + (element) => element.purpose == SDPStreamMetadataPurpose.Usermedia, + orElse: () => Null as WrappedMediaStream); + WrappedMediaStream? get localScreenSharingStream => + getLocalStreams.firstWhere( + (element) => element.purpose == SDPStreamMetadataPurpose.Screenshare, + orElse: () => Null as WrappedMediaStream); + WrappedMediaStream? get remoteUserMediaStream => getRemoteStreams.firstWhere( + (element) => element.purpose == SDPStreamMetadataPurpose.Usermedia, + orElse: () => Null as WrappedMediaStream); + WrappedMediaStream? get remoteScreenSharingStream => + getRemoteStreams.firstWhere( + (element) => element.purpose == SDPStreamMetadataPurpose.Screenshare, + orElse: () => Null as WrappedMediaStream); + final _callStateController = + StreamController.broadcast(sync: true); + Stream get onCallStateChanged => _callStateController.stream; + final _callEventController = + StreamController.broadcast(sync: true); + Stream get onCallEventChanged => _callEventController.stream; + Timer? inviteTimer; + Timer? ringingTimer; + + Future initOutboundCall(CallType type) async { + await _preparePeerConnection(); + setCallState(CallState.kCreateOffer); + final stream = await _getUserMedia(type); + _addLocalStream(stream, SDPStreamMetadataPurpose.Usermedia); + } + + Future initWithInvite(CallType type, RTCSessionDescription offer, + SDPStreamMetadata? metadata, int lifetime) async { + await _preparePeerConnection(); + + _addLocalStream( + await _getUserMedia(type), SDPStreamMetadataPurpose.Usermedia); + + if (metadata != null) { + _updateRemoteSDPStreamMetadata(metadata); + } + + await pc.setRemoteDescription(offer); + + setCallState(CallState.kRinging); + + ringingTimer = Timer(Duration(milliseconds: lifetime - 3000), () { + if (state == CallState.kRinging) { + Logs().v('[VOIP] Call invite has expired. Hanging up.'); + hangupParty = CallParty.kRemote; // effectively + setCallState(CallState.kEnded); + emit(CallEvent.kHangup); + } + ringingTimer?.cancel(); + ringingTimer = null; + }); + } + + void initWithHangup() { + setCallState(CallState.kEnded); + } + + void onAnswerReceived( + RTCSessionDescription answer, SDPStreamMetadata? 
metadata) async { + if (metadata != null) { + _updateRemoteSDPStreamMetadata(metadata); + } + + if (direction == Direction.kOutgoing) { + setCallState(CallState.kConnecting); + await pc.setRemoteDescription(answer); + remoteCandidates.forEach((candidate) => pc.addCandidate(candidate)); + } + } + + void onNegotiateReceived( + SDPStreamMetadata? metadata, RTCSessionDescription description) async { + final polite = direction == Direction.kIncoming; + + // Here we follow the perfect negotiation logic from + // https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Perfect_negotiation + final offerCollision = ((description.type == 'offer') && + (makingOffer || + pc.signalingState != RTCSignalingState.RTCSignalingStateStable)); + + ignoreOffer = !polite && offerCollision; + if (ignoreOffer) { + Logs().i('Ignoring colliding negotiate event because we\'re impolite'); + return; + } + + final prevLocalOnHold = await isLocalOnHold(); + + if (metadata != null) { + _updateRemoteSDPStreamMetadata(metadata); + } + + try { + await pc.setRemoteDescription(description); + if (description.type == 'offer') { + final answer = await pc.createAnswer(); + await room.sendCallNegotiate( + callId, lifetimeMs, localPartyId, answer.sdp!, + type: answer.type!); + await pc.setLocalDescription(answer); + } + } catch (e) { + _getLocalOfferFailed(e); + Logs().e('[VOIP] onNegotiateReceived => ${e.toString()}'); + return; + } + + final newLocalOnHold = await isLocalOnHold(); + if (prevLocalOnHold != newLocalOnHold) { + localHold = newLocalOnHold; + emit(CallEvent.kLocalHoldUnhold, newLocalOnHold); + } + } + + void _updateRemoteSDPStreamMetadata(SDPStreamMetadata metadata) { + remoteSDPStreamMetadata = metadata; + remoteSDPStreamMetadata!.sdpStreamMetadatas + .forEach((streamId, sdpStreamMetadata) { + Logs().i( + 'Stream purpose update: \nid = "$streamId", \npurpose = "${sdpStreamMetadata.purpose}", \naudio_muted = ${sdpStreamMetadata.audio_muted}, \nvideo_muted = ${sdpStreamMetadata.video_muted}'); + }); + getRemoteStreams.forEach((wpstream) { + final streamId = wpstream.stream!.id; + final purpose = metadata.sdpStreamMetadatas[streamId]; + if (purpose != null) { + wpstream + .setAudioMuted(metadata.sdpStreamMetadatas[streamId]!.audio_muted); + wpstream + .setVideoMuted(metadata.sdpStreamMetadatas[streamId]!.video_muted); + wpstream.purpose = metadata.sdpStreamMetadatas[streamId]!.purpose; + } else { + Logs().i('Not found purpose for remote stream $streamId, remove it?'); + wpstream.stopped = true; + emit(CallEvent.kFeedsChanged, streams); + } + }); + } + + void onSDPStreamMetadataReceived(SDPStreamMetadata metadata) async { + _updateRemoteSDPStreamMetadata(metadata); + emit(CallEvent.kFeedsChanged, streams); + } + + void onCandidatesReceived(List candidates) { + candidates.forEach((json) async { + final candidate = RTCIceCandidate( + json['candidate'], + json['sdpMid'] ?? '', + json['sdpMLineIndex']?.round() ?? 
0, + ); + + if (pc != null && inviteOrAnswerSent && remotePartyId != null) { + try { + await pc.addCandidate(candidate); + } catch (e) { + Logs().e('[VOIP] onCandidatesReceived => ${e.toString()}'); + } + } else { + remoteCandidates.add(candidate); + } + }); + + if (pc != null && + pc.iceConnectionState == + RTCIceConnectionState.RTCIceConnectionStateDisconnected) { + _restartIce(); + } + } + + void onAssertedIdentityReceived(AssertedIdentity identity) async { + remoteAssertedIdentity = identity; + emit(CallEvent.kAssertedIdentityChanged); + } + + bool get screensharingEnabled => localScreenSharingStream != null; + + Future setScreensharingEnabled(bool enabled) async { + // Skip if there is nothing to do + if (enabled && localScreenSharingStream != null) { + Logs().w( + 'There is already a screensharing stream - there is nothing to do!'); + return true; + } else if (!enabled && localScreenSharingStream == null) { + Logs().w( + 'There already isn\'t a screensharing stream - there is nothing to do!'); + return false; + } + + Logs().d('Set screensharing enabled? $enabled'); + + if (enabled) { + try { + final MediaStream? stream = await _getDisplayMedia(); + if (stream == null) { + return false; + } + _addLocalStream(stream, SDPStreamMetadataPurpose.Screenshare); + return true; + } catch (err) { + emit( + CallEvent.kError, + CallError(CallErrorCode.NoUserMedia, + 'Failed to get screen-sharing stream: ', err)); + return false; + } + } else { + for (final sender in screensharingSenders) { + await pc.removeTrack(sender); + } + for (final track in localScreenSharingStream!.stream!.getTracks()) { + await track.stop(); + } + localScreenSharingStream!.stopped = true; + emit(CallEvent.kFeedsChanged, streams); + return false; + } + } + + void _addLocalStream(MediaStream stream, String purpose, + {bool addToPeerConnection = true}) async { + final WrappedMediaStream? 
existingStream = getLocalStreams.firstWhere( + (element) => element.purpose == purpose, + orElse: () => Null as WrappedMediaStream); + if (existingStream != null) { + existingStream.setNewStream(stream); + } else { + final newStream = WrappedMediaStream( + userId: client.userID!, + room: opts.room, + stream: stream, + purpose: purpose, + client: client, + audioMuted: stream.getAudioTracks().isEmpty, + videoMuted: stream.getVideoTracks().isEmpty, + ); + await newStream.initialize(); + streams[stream.id] = newStream; + emit(CallEvent.kFeedsChanged, streams); + } + + if (addToPeerConnection) { + if (purpose == SDPStreamMetadataPurpose.Screenshare) { + screensharingSenders.clear(); + stream.getTracks().forEach((track) async { + screensharingSenders.add(await pc.addTrack(track, stream)); + }); + } else if (purpose == SDPStreamMetadataPurpose.Usermedia) { + usermediaSenders.clear(); + stream.getTracks().forEach((track) async { + usermediaSenders.add(await pc.addTrack(track, stream)); + }); + } + emit(CallEvent.kFeedsChanged, streams); + } + + if (purpose == SDPStreamMetadataPurpose.Usermedia) { + speakerOn = type == CallType.kVideo; + if (!kIsWeb && !voip.background) { + final audioTrack = stream.getAudioTracks()[0]; + audioTrack.enableSpeakerphone(speakerOn); + } + } + } + + void _addRemoteStream(MediaStream stream) async { + //const userId = this.getOpponentMember().userId; + final metadata = remoteSDPStreamMetadata!.sdpStreamMetadatas[stream.id]; + if (metadata == null) { + Logs().i( + 'Ignoring stream with id ${stream.id} because we didn\'t get any metadata about it'); + return; + } + + final purpose = metadata.purpose; + final audioMuted = metadata.audio_muted; + final videoMuted = metadata.video_muted; + + // Try to find a feed with the same purpose as the new stream, + // if we find it replace the old stream with the new one + final WrappedMediaStream? existingStream = getRemoteStreams.firstWhere( + (element) => element.purpose == purpose, + orElse: () => Null as WrappedMediaStream); + if (existingStream != null) { + existingStream.setNewStream(stream); + } else { + final newStream = WrappedMediaStream( + userId: remoteUser.id, + room: opts.room, + stream: stream, + purpose: purpose, + client: client, + audioMuted: audioMuted, + videoMuted: videoMuted, + ); + await newStream.initialize(); + streams[stream.id] = newStream; + } + emit(CallEvent.kFeedsChanged, streams); + Logs().i('Pushed remote stream (id="${stream.id}", purpose=$purpose)'); + } + + void setCallState(CallState newState) { + final oldState = state; + state = newState; + _callStateController.add(newState); + emit(CallEvent.kState, state, oldState); + } + + void setLocalVideoMuted(bool muted) { + localUserMediaStream?.setVideoMuted(muted); + _updateMuteStatus(); + } + + bool get isLocalVideoMuted => localUserMediaStream?.isVideoMuted() ?? false; + + void setMicrophoneMuted(bool muted) { + localUserMediaStream?.setAudioMuted(muted); + _updateMuteStatus(); + } + + bool get isMicrophoneMuted => localUserMediaStream?.isAudioMuted() ?? false; + + void setRemoteOnHold(bool onHold) async { + if (isRemoteOnHold == onHold) return; + remoteOnHold = onHold; + final transceivers = await pc.getTransceivers(); + for (final transceiver in transceivers) { + await transceiver.setDirection(onHold + ? 
TransceiverDirection.SendOnly + : TransceiverDirection.SendRecv); + } + _updateMuteStatus(); + emit(CallEvent.kRemoteHoldUnhold, remoteOnHold); + } + + bool get isRemoteOnHold => remoteOnHold; + + Future isLocalOnHold() async { + if (state != CallState.kConnected) return false; + var callOnHold = true; + // We consider a call to be on hold only if *all* the tracks are on hold + // (is this the right thing to do?) + final transceivers = await pc.getTransceivers(); + for (final transceiver in transceivers) { + final currentDirection = await transceiver.getCurrentDirection(); + Logs() + .i('transceiver.currentDirection = ${currentDirection?.toString()}'); + final trackOnHold = (currentDirection == TransceiverDirection.Inactive || + currentDirection == TransceiverDirection.RecvOnly); + if (!trackOnHold) { + callOnHold = false; + } + } + return callOnHold; + } + + void setSpeakerOn() { + speakerOn = !speakerOn; + } + + Future switchCamera() async { + if (localUserMediaStream != null) { + await Helper.switchCamera( + localUserMediaStream!.stream!.getVideoTracks()[0]); + if (kIsMobile) { + facingMode == 'user' ? facingMode = 'environment' : facingMode = 'user'; + } + } + } + + void answer() async { + if (inviteOrAnswerSent) { + return; + } + // stop play ringtone + voip.stopRingTone(); + + if (direction == Direction.kIncoming) { + setCallState(CallState.kCreateAnswer); + + final answer = await pc.createAnswer({}); + remoteCandidates.forEach((candidate) => pc.addCandidate(candidate)); + + final callCapabilities = CallCapabilities() + ..dtmf = false + ..transferee = false; + + final metadata = SDPStreamMetadata({ + localUserMediaStream!.stream!.id: SDPStreamPurpose( + purpose: SDPStreamMetadataPurpose.Usermedia, + audio_muted: localUserMediaStream!.stream!.getAudioTracks().isEmpty, + video_muted: localUserMediaStream!.stream!.getVideoTracks().isEmpty) + }); + + final res = await room.answerCall(callId, answer.sdp!, localPartyId, + type: answer.type!, + capabilities: callCapabilities, + metadata: metadata); + Logs().v('[VOIP] answer res => $res'); + await pc.setLocalDescription(answer); + setCallState(CallState.kConnecting); + inviteOrAnswerSent = true; + _answeredByUs = true; + } + } + + /// Reject a call + /// This used to be done by calling hangup, but is a separate method and protocol + /// event as of MSC2746. + /// + void reject() { + if (state != CallState.kRinging) { + Logs().e('[VOIP] Call must be in \'ringing\' state to reject!'); + return; + } + Logs().d('[VOIP] Rejecting call: $callId'); + terminate(CallParty.kLocal, CallErrorCode.UserHangup, true); + room.sendCallReject(callId, lifetimeMs, localPartyId); + } + + void hangup([String? reason, bool suppressEvent = true]) async { + // stop play ringtone + voip.stopRingTone(); + + terminate( + CallParty.kLocal, reason ?? 
CallErrorCode.UserHangup, !suppressEvent);
+
+    try {
+      final res = await room.hangupCall(callId, localPartyId, 'userHangup');
+      Logs().v('[VOIP] hangup res => $res');
+    } catch (e) {
+      Logs().v('[VOIP] hangup error => ${e.toString()}');
+    }
+  }
+
+  void sendDTMF(String tones) async {
+    final senders = await pc.getSenders();
+    for (final sender in senders) {
+      if (sender.track != null && sender.track!.kind == 'audio') {
+        await sender.dtmfSender.insertDTMF(tones);
+        return;
+      }
+    }
+    Logs().e('Unable to find a track to send DTMF on');
+  }
+
+  void terminate(CallParty party, String hangupReason, bool shouldEmit) async {
+    if (state == CallState.kEnded) {
+      return;
+    }
+
+    inviteTimer?.cancel();
+    inviteTimer = null;
+
+    ringingTimer?.cancel();
+    ringingTimer = null;
+
+    hangupParty = party;
+    this.hangupReason = hangupReason;
+
+    setCallState(CallState.kEnded);
+    voip.currentCID = null;
+    voip.calls.remove(callId);
+
+    if (shouldEmit) {
+      emit(CallEvent.kHangup, this);
+    }
+  }
+
+  void onRejectReceived(String reason) {
+    Logs().v('[VOIP] Reject received for call ID ' + callId);
+    // No need to check party_id for reject because if we'd received either
+    // an answer or reject, we wouldn't be in state InviteSent
+    final shouldTerminate =
+        (state == CallState.kFledgling && direction == Direction.kIncoming) ||
+            CallState.kInviteSent == state ||
+            CallState.kRinging == state;
+
+    if (shouldTerminate) {
+      terminate(CallParty.kRemote, reason ?? CallErrorCode.UserHangup, true);
+    } else {
+      Logs().e('Call is in state: ${state.toString()}: ignoring reject');
+    }
+  }
+
+  Future<void> _gotLocalOffer(RTCSessionDescription offer) async {
+    if (callHasEnded) {
+      Logs().d(
+          'Ignoring newly created offer on call ID ${opts.callId} because the call has ended');
+      return;
+    }
+
+    try {
+      await pc.setLocalDescription(offer);
+    } catch (err) {
+      Logs().d('Error setting local description! 
${err.toString()}'); + terminate(CallParty.kLocal, CallErrorCode.SetLocalDescription, true); + return; + } + + if (callHasEnded) return; + + final callCapabilities = CallCapabilities() + ..dtmf = false + ..transferee = false; + final metadata = _getLocalSDPStreamMetadata(); + if (state == CallState.kCreateOffer) { + await room.inviteToCall( + callId, lifetimeMs, localPartyId, null, offer.sdp!, + capabilities: callCapabilities, metadata: metadata); + inviteOrAnswerSent = true; + setCallState(CallState.kInviteSent); + + inviteTimer = Timer(Duration(seconds: callTimeoutSec), () { + if (state == CallState.kInviteSent) { + hangup(CallErrorCode.InviteTimeout, false); + } + inviteTimer?.cancel(); + inviteTimer = null; + }); + } else { + await room.sendCallNegotiate(callId, lifetimeMs, localPartyId, offer.sdp!, + type: offer.type!, + capabilities: callCapabilities, + metadata: metadata); + } + } + + void onNegotiationNeeded() async { + Logs().i('Negotiation is needed!'); + makingOffer = true; + try { + final offer = await pc.createOffer({}); + await _gotLocalOffer(offer); + } catch (e) { + _getLocalOfferFailed(e); + return; + } finally { + makingOffer = false; + } + } + + Future _preparePeerConnection() async { + try { + pc = await _createPeerConnection(); + + pc.onRenegotiationNeeded = onNegotiationNeeded; + + pc.onIceCandidate = (RTCIceCandidate candidate) async { + //Logs().v('[VOIP] onIceCandidate => ${candidate.toMap().toString()}'); + localCandidates.add(candidate); + }; + pc.onIceGatheringState = (RTCIceGatheringState state) async { + Logs().v('[VOIP] IceGatheringState => ${state.toString()}'); + if (state == RTCIceGatheringState.RTCIceGatheringStateGathering) { + Timer(Duration(milliseconds: 3000), () async { + if (!iceGatheringFinished) { + iceGatheringFinished = true; + await _candidateReady(); + } + }); + } + if (state == RTCIceGatheringState.RTCIceGatheringStateComplete) { + if (!iceGatheringFinished) { + iceGatheringFinished = true; + await _candidateReady(); + } + } + }; + pc.onIceConnectionState = (RTCIceConnectionState state) { + Logs().v('[VOIP] RTCIceConnectionState => ${state.toString()}'); + if (state == RTCIceConnectionState.RTCIceConnectionStateConnected) { + localCandidates.clear(); + remoteCandidates.clear(); + setCallState(CallState.kConnected); + } else if (state == RTCIceConnectionState.RTCIceConnectionStateFailed) { + hangup(CallErrorCode.IceFailed, false); + } + }; + } catch (e) { + Logs().v('[VOIP] prepareMediaStream error => ${e.toString()}'); + } + } + + void onAnsweredElsewhere(String msg) { + Logs().d('Call ID $callId answered elsewhere'); + terminate(CallParty.kRemote, CallErrorCode.AnsweredElsewhere, true); + } + + void cleanUp() async { + streams.forEach((id, stream) { + stream.dispose(); + }); + streams.clear(); + if (pc != null) { + await pc.close(); + await pc.dispose(); + } + } + + void _updateMuteStatus() async { + final micShouldBeMuted = (localUserMediaStream != null && + localUserMediaStream!.isAudioMuted()) || + remoteOnHold; + final vidShouldBeMuted = (localUserMediaStream != null && + localUserMediaStream!.isVideoMuted()) || + remoteOnHold; + + _setTracksEnabled(localUserMediaStream?.stream!.getAudioTracks() ?? [], + !micShouldBeMuted); + _setTracksEnabled(localUserMediaStream?.stream!.getVideoTracks() ?? 
[],
+        !vidShouldBeMuted);
+
+    await opts.room.sendSDPStreamMetadataChanged(
+        callId, localPartyId, _getLocalSDPStreamMetadata());
+  }
+
+  void _setTracksEnabled(List<MediaStreamTrack> tracks, bool enabled) {
+    tracks.forEach((track) async {
+      track.enabled = enabled;
+    });
+  }
+
+  SDPStreamMetadata _getLocalSDPStreamMetadata() {
+    final sdpStreamMetadatas = <String, SDPStreamPurpose>{};
+    for (final wpstream in getLocalStreams) {
+      sdpStreamMetadatas[wpstream.stream!.id] = SDPStreamPurpose(
+          purpose: wpstream.purpose,
+          audio_muted: wpstream.audioMuted,
+          video_muted: wpstream.videoMuted);
+    }
+    final metadata = SDPStreamMetadata(sdpStreamMetadatas);
+    Logs().v('Got local SDPStreamMetadata ${metadata.toJson().toString()}');
+    return metadata;
+  }
+
+  void _restartIce() async {
+    Logs().v('[VOIP] iceRestart.');
+    // Needs restart ice on session.pc and renegotiation.
+    iceGatheringFinished = false;
+    final desc =
+        await pc.createOffer(_getOfferAnswerConstraints(iceRestart: true));
+    await pc.setLocalDescription(desc);
+    localCandidates.clear();
+  }
+
+  Future<MediaStream> _getUserMedia(CallType type) async {
+    final mediaConstraints = <String, dynamic>{
+      'audio': true,
+      'video': type == CallType.kVideo
+          ? {
+              'mandatory': {
+                'minWidth': '640',
+                'minHeight': '480',
+                'minFrameRate': '30',
+              },
+              'facingMode': 'user',
+              'optional': [],
+            }
+          : false,
+    };
+    try {
+      return await navigator.mediaDevices.getUserMedia(mediaConstraints);
+    } catch (e) {
+      _getUserMediaFailed(e);
+    }
+    return Null as MediaStream;
+  }
+
+  Future<MediaStream> _getDisplayMedia() async {
+    final mediaConstraints = <String, dynamic>{
+      'audio': false,
+      'video': true,
+    };
+    try {
+      return await navigator.mediaDevices.getDisplayMedia(mediaConstraints);
+    } catch (e) {
+      _getUserMediaFailed(e);
+    }
+    return Null as MediaStream;
+  }
+
+  Future<RTCPeerConnection> _createPeerConnection() async {
+    final configuration = <String, dynamic>{
+      'iceServers': opts.iceServers,
+      'sdpSemantics': 'unified-plan'
+    };
+    final pc = await createPeerConnection(configuration);
+    pc.onTrack = (RTCTrackEvent event) {
+      if (event.streams.isNotEmpty) {
+        final stream = event.streams[0];
+        _addRemoteStream(stream);
+      }
+    };
+    return pc;
+  }
+
+  void tryRemoveStopedStreams() {
+    final removedStreams = <String, WrappedMediaStream>{};
+    streams.forEach((id, stream) {
+      if (stream.stopped) {
+        removedStreams[id] = stream;
+      }
+    });
+    streams.removeWhere((id, stream) => removedStreams.containsKey(id));
+    removedStreams.forEach((id, element) {
+      _removeStream(id);
+    });
+  }
+
+  Future<void> _removeStream(String streamId) async {
+    Logs().v('Removing feed with stream id $streamId');
+    final removedStream = streams.remove(streamId);
+    if (removedStream == null) {
+      Logs().v('Didn\'t find the feed with stream id $streamId to delete');
+      return;
+    }
+    await removedStream.dispose();
+  }
+
+  Map<String, dynamic> _getOfferAnswerConstraints({bool iceRestart = false}) {
+    return {
+      'mandatory': {if (iceRestart) 'IceRestart': true},
+      'optional': [],
+    };
+  }
+
+  Future<void> _candidateReady() async {
+    /*
+    Currently, trickle ICE is not supported, so collecting all the
+    candidates can take a long time. Set a timeout for candidate
+    collection to speed up the connection.
+    */
+    try {
+      final candidates = <Map<String, dynamic>>[];
+      localCandidates.forEach((element) {
+        candidates.add(element.toMap());
+      });
+      final res =
+          await room.sendCallCandidates(callId, localPartyId, candidates);
+      Logs().v('[VOIP] sendCallCandidates res => $res');
+    } catch (e) {
+      Logs().v('[VOIP] sendCallCandidates e => ${e.toString()}');
+    }
+  }
+
+  void emit(CallEvent event, [dynamic arg1, dynamic arg2, dynamic arg3]) {
+    _callEventController.add(event);
+    Logs().i('CallEvent: ${event.toString()}');
+    switch (event) {
+      case CallEvent.kFeedsChanged:
+        break;
+      case CallEvent.kState:
+        Logs().i('CallState: ${state.toString()}');
+        break;
+      case CallEvent.kError:
+        break;
+      case CallEvent.kHangup:
+        break;
+      case CallEvent.kReplaced:
+        break;
+      case CallEvent.kLocalHoldUnhold:
+        break;
+      case CallEvent.kRemoteHoldUnhold:
+        break;
+      case CallEvent.kAssertedIdentityChanged:
+        break;
+    }
+  }
+
+  void _getLocalOfferFailed(dynamic err) {
+    Logs().e('Failed to get local offer ${err.toString()}');
+
+    emit(
+      CallEvent.kError,
+      CallError(
+        CallErrorCode.LocalOfferFailed,
+        'Failed to get local offer!',
+        err,
+      ),
+    );
+    terminate(CallParty.kLocal, CallErrorCode.LocalOfferFailed, false);
+  }
+
+  void _getUserMediaFailed(dynamic err) {
+    Logs().w('Failed to get user media - ending call ${err.toString()}');
+    emit(
+      CallEvent.kError,
+      CallError(
+        CallErrorCode.NoUserMedia,
+        'Couldn\'t start capturing media! Is your microphone set up and does this app have permission?',
+        err,
+      ),
+    );
+    terminate(CallParty.kLocal, CallErrorCode.NoUserMedia, false);
+  }
+
+  void onSelectAnswerReceived(String selectedPartyId) {
+    if (direction != Direction.kIncoming) {
+      Logs().w('Got select_answer for an outbound call: ignoring');
+      return;
+    }
+    if (selectedPartyId == null) {
+      Logs().w(
+          'Got nonsensical select_answer with null/undefined selected_party_id: ignoring');
+      return;
+    }
+
+    if (selectedPartyId != localPartyId) {
+      Logs().w(
+          'Got select_answer for party ID $selectedPartyId: we are party ID $localPartyId.');
+      // The other party has picked somebody else's answer
+      terminate(CallParty.kRemote, CallErrorCode.AnsweredElsewhere, true);
+    }
+  }
+}
+
+class VoIP with WidgetsBindingObserver {
+  TurnServerCredentials? _turnServerCredentials;
+  Map<String, CallSession> calls = {};
+  String? currentCID;
+  //ConnectivityResult _currentConnectivity;
+  Function(CallSession session)? onIncomingCall;
+  OverlayEntry? overlayEntry;
+  String? 
get localPartyId => client.deviceID; + bool background = false; + Client client; + + @override + void didChangeAppLifecycleState(AppLifecycleState state) { + Logs().v('AppLifecycleState = $state'); + background = !(state != AppLifecycleState.detached && + state != AppLifecycleState.paused); + } + + void addCallingOverlay( + BuildContext context, String callId, CallSession call) { + if (overlayEntry != null) { + Logs().w('[VOIP] addCallingOverlay: The call session already exists?'); + overlayEntry?.remove(); + } + /* TODO: + final overlay = Overlay.of(context); + overlayEntry = OverlayEntry( + builder: (_) => Calling( + context: famedly.context, + client: client, + callId: callId, + call: call, + onClear: () { + overlayEntry?.remove(); + overlayEntry = null; + }), + ); + overlay.insert(overlayEntry); + */ + } + + VoIP(this.client) : super() { + client.onCallInvite.stream.listen(onCallInvite); + client.onCallAnswer.stream.listen(onCallAnswer); + client.onCallCandidates.stream.listen(onCallCandidates); + client.onCallHangup.stream.listen(onCallHangup); + client.onCallReject.stream.listen(onCallReject); + client.onCallNegotiate.stream.listen(onCallNegotiate); + client.onCallReplaces.stream.listen(onCallReplaces); + client.onCallSelectAnswer.stream.listen(onCallSelectAnswer); + client.onSDPStreamMetadataChangedReceived.stream + .listen(onSDPStreamMetadataChangedReceived); + client.onAssertedIdentityReceived.stream.listen(onAssertedIdentityReceived); + /* + Connectivity().onConnectivityChanged.listen(_handleNetworkChanged); + Connectivity() + .checkConnectivity() + .then((result) => _currentConnectivity = result) + .catchError((e) => _currentConnectivity = ConnectivityResult.none); + */ + + if (!kIsWeb) { + final wb = WidgetsBinding.instance; + wb!.addObserver(this); + didChangeAppLifecycleState(wb.lifecycleState!); + } + } + + Future onCallInvite(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. + return; + } + + Logs().v( + '[VOIP] onCallInvite ${event.senderId} => ${client.userID}, \ncontent => ${event.content.toString()}'); + + final String callId = event.content['call_id']; + final String partyId = event.content['party_id']; + final int lifetime = event.content['lifetime']; + + if (currentCID != null) { + // Only one session at a time. + Logs().v('[VOIP] onCallInvite: There is already a session.'); + await event.room.hangupCall(callId, localPartyId!, 'userBusy'); + return; + } + if (calls[callId] != null) { + // Session already exist. + Logs().v('[VOIP] onCallInvite: Session [$callId] already exist.'); + return; + } + + if (event.content['capabilities'] != null) { + final capabilities = + CallCapabilities.fromJson(event.content['capabilities']); + Logs().v( + '[VOIP] CallCapabilities: dtmf => ${capabilities.dtmf}, transferee => ${capabilities.transferee}'); + } + + var callType = CallType.kVoice; + SDPStreamMetadata? 
sdpStreamMetadata; + if (event.content[sdpStreamMetadataKey] != null) { + sdpStreamMetadata = + SDPStreamMetadata.fromJson(event.content[sdpStreamMetadataKey]); + sdpStreamMetadata.sdpStreamMetadatas + .forEach((streamId, SDPStreamPurpose purpose) { + Logs().v( + '[VOIP] [$streamId] => purpose: ${purpose.purpose}, audioMuted: ${purpose.audio_muted}, videoMuted: ${purpose.video_muted}'); + + if (!purpose.video_muted) { + callType = CallType.kVideo; + } + }); + } else { + callType = getCallType(event.content['offer']['sdp']); + } + + final opts = CallOptions() + ..voip = this + ..callId = callId + ..dir = Direction.kIncoming + ..type = callType + ..room = event.room + ..localPartyId = localPartyId! + ..iceServers = await getIceSevers(); + + final newCall = createNewCall(opts); + newCall.remotePartyId = partyId; + newCall.remoteUser = event.sender; + final offer = RTCSessionDescription( + event.content['offer']['sdp'], + event.content['offer']['type'], + ); + await newCall + .initWithInvite(callType, offer, sdpStreamMetadata, lifetime) + .then((_) { + // Popup CallingPage for incoming call. + if (!background) { + //TODO: + //addCallingOverlay(famedly.context, callId, newCall); + } + }); + currentCID = callId; + + if (background) { + /// Forced to enable signaling synchronization until the end of the call. + client.backgroundSync = true; + + /// Handle incoming call for callkeep plugin. + onIncomingCall?.call(newCall); + } + // Play ringtone + playRingtone(); + } + + void playRingtone() async { + if (!background) { + try { + // TODO: callback the event to the user. + // await UserMediaManager().startRinginTone(); + } catch (_) {} + } + } + + void stopRingTone() async { + if (!background) { + try { + // TODO: + // await UserMediaManager().stopRingingTone(); + } catch (_) {} + } + } + + void onCallAnswer(Event event) async { + Logs().v('[VOIP] onCallAnswer => ${event.content.toString()}'); + final String callId = event.content['call_id']; + final String partyId = event.content['party_id']; + + final call = calls[callId]; + if (call != null) { + if (event.senderId == client.userID) { + // Ignore messages to yourself. + if (!call._answeredByUs) { + stopRingTone(); + } + return; + } + + call.remotePartyId = partyId; + call.remoteUser = event.sender; + + final answer = RTCSessionDescription( + event.content['answer']['sdp'], event.content['answer']['type']); + + SDPStreamMetadata? metadata; + if (event.content[sdpStreamMetadataKey] != null) { + metadata = + SDPStreamMetadata.fromJson(event.content[sdpStreamMetadataKey]); + } + call.onAnswerReceived(answer, metadata); + + /// Send select_answer event. + await event.room.selectCallAnswer( + callId, lifetimeMs, localPartyId!, call.remotePartyId); + } else { + Logs().v('[VOIP] onCallAnswer: Session [$callId] not found!'); + } + } + + void onCallCandidates(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. 
+ return; + } + Logs().v('[VOIP] onCallCandidates => ${event.content.toString()}'); + final String callId = event.content['call_id']; + final call = calls[callId]; + if (call != null) { + call.onCandidatesReceived(event.content['candidates']); + } else { + Logs().v('[VOIP] onCallCandidates: Session [$callId] not found!'); + } + } + + void onCallHangup(Event event) async { + // stop play ringtone, if this is an incoming call + if (!background) { + stopRingTone(); + } + Logs().v('[VOIP] onCallHangup => ${event.content.toString()}'); + final String callId = event.content['call_id']; + final call = calls[callId]; + if (call != null) { + // hangup in any case, either if the other party hung up or we did on another device + call.terminate(CallParty.kRemote, + event.content['reason'] ?? CallErrorCode.UserHangup, true); + + overlayEntry?.remove(); + overlayEntry = null; + } else { + Logs().v('[VOIP] onCallHangup: Session [$callId] not found!'); + } + currentCID = null; + } + + void onCallReject(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. + return; + } + final String callId = event.content['call_id']; + Logs().d('Reject received for call ID ' + callId); + + final call = calls[callId]; + if (call != null) { + call.onRejectReceived(event.content['reason']); + } else { + Logs().v('[VOIP] onCallHangup: Session [$callId] not found!'); + } + } + + void onCallReplaces(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. + return; + } + final String callId = event.content['call_id']; + Logs().d('onCallReplaces received for call ID ' + callId); + final call = calls[callId]; + if (call != null) { + //TODO: handle replaces + } + } + + void onCallSelectAnswer(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. + return; + } + final String callId = event.content['call_id']; + Logs().d('SelectAnswer received for call ID ' + callId); + final call = calls[callId]; + final String selectedPartyId = event.content['selected_party_id']; + + if (call != null) { + call.onSelectAnswerReceived(selectedPartyId); + } + } + + void onSDPStreamMetadataChangedReceived(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. + return; + } + final String callId = event.content['call_id']; + Logs().d('SDP Stream metadata received for call ID ' + callId); + final call = calls[callId]; + if (call != null) { + if (event.content[sdpStreamMetadataKey] == null) { + Logs().d('SDP Stream metadata is null'); + return; + } + call.onSDPStreamMetadataReceived( + SDPStreamMetadata.fromJson(event.content[sdpStreamMetadataKey])); + } + } + + void onAssertedIdentityReceived(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. + return; + } + final String callId = event.content['call_id']; + Logs().d('Asserted identity received for call ID ' + callId); + final call = calls[callId]; + if (call != null) { + if (event.content['asserted_identity'] == null) { + Logs().d('asserted_identity is null '); + return; + } + call.onAssertedIdentityReceived( + AssertedIdentity.fromJson(event.content['asserted_identity'])); + } + } + + void onCallNegotiate(Event event) async { + if (event.senderId == client.userID) { + // Ignore messages to yourself. 
+ return; + } + final String callId = event.content['call_id']; + Logs().d('Negotiate received for call ID ' + callId); + final call = calls[callId]; + if (call != null) { + final description = event.content['description']; + try { + SDPStreamMetadata? metadata; + if (event.content[sdpStreamMetadataKey] != null) { + metadata = + SDPStreamMetadata.fromJson(event.content[sdpStreamMetadataKey]); + } + call.onNegotiateReceived(metadata, + RTCSessionDescription(description['sdp'], description['type'])); + } catch (err) { + Logs().e('Failed to complete negotiation ${err.toString()}'); + } + } + } + + CallType getCallType(String sdp) { + try { + final session = sdp_transform.parse(sdp); + if (session['media'].indexWhere((e) => e['type'] == 'video') != -1) { + return CallType.kVideo; + } + } catch (err) { + Logs().e('Failed to getCallType ${err.toString()}'); + } + + return CallType.kVoice; + } + + Future requestTurnServerCredentials() async { + return true; + } + + Future>> getIceSevers() async { + if (_turnServerCredentials == null) { + try { + _turnServerCredentials = await client.getTurnServer(); + } catch (e) { + Logs().v('[VOIP] getTurnServerCredentials error => ${e.toString()}'); + } + } + + if (_turnServerCredentials == null) { + return []; + } + + return [ + { + 'username': _turnServerCredentials!.username, + 'credential': _turnServerCredentials!.password, + 'url': _turnServerCredentials!.uris[0] + } + ]; + } + /* + void _handleNetworkChanged(ConnectivityResult result) async { + // Got a new connectivity status! + if (_currentConnectivity != result) { + calls.forEach((_, sess) { + sess._restartIce(); + }); + } + _currentConnectivity = result; + }*/ + + Future inviteToCall(String roomId, CallType type) async { + final room = client.getRoomById(roomId); + if (room == null) { + Logs().v('[VOIP] Invalid room id [$roomId].'); + return Null as CallSession; + } + final callId = 'cid${DateTime.now().millisecondsSinceEpoch}'; + final opts = CallOptions() + ..callId = callId + ..type = type + ..dir = Direction.kOutgoing + ..room = room + ..voip = this + ..localPartyId = localPartyId! + ..iceServers = await getIceSevers(); + + final newCall = createNewCall(opts); + currentCID = callId; + await newCall.initOutboundCall(type).then((_) { + if (!background) { + //TODO: + //addCallingOverlay(famdly.context, callId, newCall); + } + }); + currentCID = callId; + return newCall; + } + + CallSession createNewCall(CallOptions opts) { + final call = CallSession(opts); + calls[opts.callId] = call; + return call; + } +} diff --git a/pubspec.yaml b/pubspec.yaml index 40246e4c..9ac08b2f 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -23,6 +23,8 @@ dependencies: slugify: ^2.0.0 html: ^0.15.0 collection: ^1.15.0 + sdp_transform: ^0.3.2 + flutter_webrtc: ^0.7.1 dev_dependencies: dart_code_metrics: ^4.4.0 From 0822e3809d12595a9b9014f401bcd80bc89b06c8 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Wed, 10 Nov 2021 21:47:16 +0800 Subject: [PATCH 02/15] chore: Null safety for voip.dart. --- lib/src/voip.dart | 66 +++++++++++++++++++++++------------------------ 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 25dafbff..bae91bb8 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -272,7 +272,7 @@ class CallSession { CallState state = CallState.kFledgling; bool get isOutgoing => direction == Direction.kOutgoing; bool get isRinging => state == CallState.kRinging; - late RTCPeerConnection pc; + RTCPeerConnection? 
pc; List remoteCandidates = []; List localCandidates = []; late AssertedIdentity remoteAssertedIdentity; @@ -287,7 +287,7 @@ class CallSession { bool ignoreOffer = false; String facingMode = 'user'; late Client client; - late String remotePartyId; + String? remotePartyId; late User remoteUser; late CallParty hangupParty; late String hangupReason; @@ -341,7 +341,7 @@ class CallSession { _updateRemoteSDPStreamMetadata(metadata); } - await pc.setRemoteDescription(offer); + await pc!.setRemoteDescription(offer); setCallState(CallState.kRinging); @@ -369,8 +369,8 @@ class CallSession { if (direction == Direction.kOutgoing) { setCallState(CallState.kConnecting); - await pc.setRemoteDescription(answer); - remoteCandidates.forEach((candidate) => pc.addCandidate(candidate)); + await pc!.setRemoteDescription(answer); + remoteCandidates.forEach((candidate) => pc!.addCandidate(candidate)); } } @@ -382,7 +382,7 @@ class CallSession { // https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Perfect_negotiation final offerCollision = ((description.type == 'offer') && (makingOffer || - pc.signalingState != RTCSignalingState.RTCSignalingStateStable)); + pc!.signalingState != RTCSignalingState.RTCSignalingStateStable)); ignoreOffer = !polite && offerCollision; if (ignoreOffer) { @@ -397,13 +397,13 @@ class CallSession { } try { - await pc.setRemoteDescription(description); + await pc!.setRemoteDescription(description); if (description.type == 'offer') { - final answer = await pc.createAnswer(); + final answer = await pc!.createAnswer(); await room.sendCallNegotiate( callId, lifetimeMs, localPartyId, answer.sdp!, type: answer.type!); - await pc.setLocalDescription(answer); + await pc!.setLocalDescription(answer); } } catch (e) { _getLocalOfferFailed(e); @@ -457,7 +457,7 @@ class CallSession { if (pc != null && inviteOrAnswerSent && remotePartyId != null) { try { - await pc.addCandidate(candidate); + await pc!.addCandidate(candidate); } catch (e) { Logs().e('[VOIP] onCandidatesReceived => ${e.toString()}'); } @@ -467,7 +467,7 @@ class CallSession { }); if (pc != null && - pc.iceConnectionState == + pc!.iceConnectionState == RTCIceConnectionState.RTCIceConnectionStateDisconnected) { _restartIce(); } @@ -511,7 +511,7 @@ class CallSession { } } else { for (final sender in screensharingSenders) { - await pc.removeTrack(sender); + await pc!.removeTrack(sender); } for (final track in localScreenSharingStream!.stream!.getTracks()) { await track.stop(); @@ -548,12 +548,12 @@ class CallSession { if (purpose == SDPStreamMetadataPurpose.Screenshare) { screensharingSenders.clear(); stream.getTracks().forEach((track) async { - screensharingSenders.add(await pc.addTrack(track, stream)); + screensharingSenders.add(await pc!.addTrack(track, stream)); }); } else if (purpose == SDPStreamMetadataPurpose.Usermedia) { usermediaSenders.clear(); stream.getTracks().forEach((track) async { - usermediaSenders.add(await pc.addTrack(track, stream)); + usermediaSenders.add(await pc!.addTrack(track, stream)); }); } emit(CallEvent.kFeedsChanged, streams); @@ -629,7 +629,7 @@ class CallSession { void setRemoteOnHold(bool onHold) async { if (isRemoteOnHold == onHold) return; remoteOnHold = onHold; - final transceivers = await pc.getTransceivers(); + final transceivers = await pc!.getTransceivers(); for (final transceiver in transceivers) { await transceiver.setDirection(onHold ? 
TransceiverDirection.SendOnly @@ -646,7 +646,7 @@ class CallSession { var callOnHold = true; // We consider a call to be on hold only if *all* the tracks are on hold // (is this the right thing to do?) - final transceivers = await pc.getTransceivers(); + final transceivers = await pc!.getTransceivers(); for (final transceiver in transceivers) { final currentDirection = await transceiver.getCurrentDirection(); Logs() @@ -684,8 +684,8 @@ class CallSession { if (direction == Direction.kIncoming) { setCallState(CallState.kCreateAnswer); - final answer = await pc.createAnswer({}); - remoteCandidates.forEach((candidate) => pc.addCandidate(candidate)); + final answer = await pc!.createAnswer({}); + remoteCandidates.forEach((candidate) => pc!.addCandidate(candidate)); final callCapabilities = CallCapabilities() ..dtmf = false @@ -703,7 +703,7 @@ class CallSession { capabilities: callCapabilities, metadata: metadata); Logs().v('[VOIP] answer res => $res'); - await pc.setLocalDescription(answer); + await pc!.setLocalDescription(answer); setCallState(CallState.kConnecting); inviteOrAnswerSent = true; _answeredByUs = true; @@ -740,7 +740,7 @@ class CallSession { } void sendDTMF(String tones) async { - final senders = await pc.getSenders(); + final senders = await pc!.getSenders(); for (final sender in senders) { if (sender.track != null && sender.track!.kind == 'audio') { await sender.dtmfSender.insertDTMF(tones); @@ -773,7 +773,7 @@ class CallSession { } } - void onRejectReceived(String reason) { + void onRejectReceived(String? reason) { Logs().v('[VOIP] Reject received for call ID ' + callId); // No need to check party_id for reject because if we'd received either // an answer or reject, we wouldn't be in state InviteSent @@ -797,7 +797,7 @@ class CallSession { } try { - await pc.setLocalDescription(offer); + await pc!.setLocalDescription(offer); } catch (err) { Logs().d('Error setting local description! 
${err.toString()}'); terminate(CallParty.kLocal, CallErrorCode.SetLocalDescription, true); @@ -836,7 +836,7 @@ class CallSession { Logs().i('Negotiation is needed!'); makingOffer = true; try { - final offer = await pc.createOffer({}); + final offer = await pc!.createOffer({}); await _gotLocalOffer(offer); } catch (e) { _getLocalOfferFailed(e); @@ -850,13 +850,13 @@ class CallSession { try { pc = await _createPeerConnection(); - pc.onRenegotiationNeeded = onNegotiationNeeded; + pc!.onRenegotiationNeeded = onNegotiationNeeded; - pc.onIceCandidate = (RTCIceCandidate candidate) async { + pc!.onIceCandidate = (RTCIceCandidate candidate) async { //Logs().v('[VOIP] onIceCandidate => ${candidate.toMap().toString()}'); localCandidates.add(candidate); }; - pc.onIceGatheringState = (RTCIceGatheringState state) async { + pc!.onIceGatheringState = (RTCIceGatheringState state) async { Logs().v('[VOIP] IceGatheringState => ${state.toString()}'); if (state == RTCIceGatheringState.RTCIceGatheringStateGathering) { Timer(Duration(milliseconds: 3000), () async { @@ -873,7 +873,7 @@ class CallSession { } } }; - pc.onIceConnectionState = (RTCIceConnectionState state) { + pc!.onIceConnectionState = (RTCIceConnectionState state) { Logs().v('[VOIP] RTCIceConnectionState => ${state.toString()}'); if (state == RTCIceConnectionState.RTCIceConnectionStateConnected) { localCandidates.clear(); @@ -899,8 +899,8 @@ class CallSession { }); streams.clear(); if (pc != null) { - await pc.close(); - await pc.dispose(); + await pc!.close(); + await pc!.dispose(); } } @@ -945,8 +945,8 @@ class CallSession { // Needs restart ice on session.pc and renegotiation. iceGatheringFinished = false; final desc = - await pc.createOffer(_getOfferAnswerConstraints(iceRestart: true)); - await pc.setLocalDescription(desc); + await pc!.createOffer(_getOfferAnswerConstraints(iceRestart: true)); + await pc!.setLocalDescription(desc); localCandidates.clear(); } @@ -1101,7 +1101,7 @@ class CallSession { terminate(CallParty.kLocal, CallErrorCode.NoUserMedia, false); } - void onSelectAnswerReceived(String selectedPartyId) { + void onSelectAnswerReceived(String? selectedPartyId) { if (direction != Direction.kIncoming) { Logs().w('Got select_answer for an outbound call: ignoring'); return; @@ -1325,7 +1325,7 @@ class VoIP with WidgetsBindingObserver { /// Send select_answer event. await event.room.selectCallAnswer( - callId, lifetimeMs, localPartyId!, call.remotePartyId); + callId, lifetimeMs, localPartyId!, call.remotePartyId!); } else { Logs().v('[VOIP] onCallAnswer: Session [$callId] not found!'); } From 56d9ba7d4a57af8e18f2ba51d107a4e6013ee5e4 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 15 Nov 2021 22:31:38 +0800 Subject: [PATCH 03/15] refactor: [WIP] Voip abstract interface. 
--- lib/src/voip.dart | 8 +-- lib/src/voip_abstract.dart | 116 +++++++++++++++++++++++++++++++++++++ pubspec.yaml | 1 - 3 files changed, 117 insertions(+), 8 deletions(-) create mode 100644 lib/src/voip_abstract.dart diff --git a/lib/src/voip.dart b/lib/src/voip.dart index bae91bb8..6d82ba5a 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -1,17 +1,11 @@ import 'dart:async'; import 'dart:core'; -import 'dart:io'; -import 'package:flutter/foundation.dart'; -import 'package:flutter/widgets.dart'; - -import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'voip_abstract.dart'; import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; import '../matrix.dart'; -bool get kIsMobile => !kIsWeb && (Platform.isAndroid || Platform.isIOS); - /// The default life time for call events, in millisecond. const lifetimeMs = 10 * 1000; diff --git a/lib/src/voip_abstract.dart b/lib/src/voip_abstract.dart new file mode 100644 index 00000000..392e5d36 --- /dev/null +++ b/lib/src/voip_abstract.dart @@ -0,0 +1,116 @@ +import 'dart:io'; + +class MediaStreamTrack { + bool get enabled => false; + set enabled(bool value) {} + String get kind => throw UnimplementedError(); + Future stop() async {} + Future enableSpeakerphone(bool enable) async {} +} + +class MediaStream { + String get id => throw UnimplementedError(); + List getAudioTracks() => throw UnimplementedError(); + List getVideoTracks() => throw UnimplementedError(); + List getTracks() => throw UnimplementedError(); + Future dispose() async {} +} + +class RTCPeerConnection { + Function(RTCTrackEvent event)? onTrack; + Function()? onRenegotiationNeeded; + Function(RTCIceCandidate)? onIceCandidate; + Function(dynamic state)? onIceGatheringState; + Function(dynamic state)? onIceConnectionState; + + Future createOffer(Map constraints) { + throw UnimplementedError(); + } + + Future createAnswer(Map constraints) { + throw UnimplementedError(); + } + + Future setRemoteDescription(RTCSessionDescription description) async {} + Future setLocalDescription(RTCSessionDescription description) async {} + + Future addTrack( + MediaStreamTrack track, MediaStream stream) async { + return RTCRtpSender(); + } + + Future removeTrack(RTCRtpSender sender) async {} + + Future close() async {} + + Future dispose() async {} + + Future addIceCandidate(RTCIceCandidate candidate) async {} + + Future addStream(MediaStream stream) async {} + + Future removeStream(MediaStream stream) async {} + + Future> getTransceivers() async { + throw UnimplementedError(); + } + + Future> getSenders() async { + throw UnimplementedError(); + } + + Future addCandidate(RTCIceCandidate candidate) async {} + + dynamic get signalingState => throw UnimplementedError(); +} + +class RTCIceCandidate { + String get candidate => throw UnimplementedError(); + String get sdpMid => throw UnimplementedError(); + int get sdpMLineIndex => throw UnimplementedError(); + Map toMap() => throw UnimplementedError(); + RTCIceCandidate(String candidate, String sdpMid, int sdpMLineIndex); +} + +class RTCRtpSender { + MediaStreamTrack? 
get track => throw UnimplementedError(); + DtmfSender get dtmfSender => throw UnimplementedError(); +} + +class RTCSessionDescription { + late String type; + late String sdp; + RTCSessionDescription(this.sdp, this.type); +} + +class RTCTrackEvent { + late List streams; +} + +enum TransceiverDirection { + SendRecv, + SendOnly, + RecvOnly, + Inactive, +} + +enum RTCSignalingState { RTCSignalingStateStable } + +class RTCVideoRenderer {} + +const kIsWeb = false; + +bool get kIsMobile => !kIsWeb && (Platform.isAndroid || Platform.isIOS); + +class Helper { + static Future switchCamera(MediaStreamTrack track) async {} +} + +class DtmfSender { + Future insertDTMF(String tones) async {} +} + +Future createPeerConnection( + Map constraints) async { + throw UnimplementedError(); +} diff --git a/pubspec.yaml b/pubspec.yaml index 9ac08b2f..4d236c92 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -24,7 +24,6 @@ dependencies: html: ^0.15.0 collection: ^1.15.0 sdp_transform: ^0.3.2 - flutter_webrtc: ^0.7.1 dev_dependencies: dart_code_metrics: ^4.4.0 From 93b623f2d5b7cdae5dd9f5b1cdfefebda8f48b32 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Fri, 19 Nov 2021 17:51:38 +0800 Subject: [PATCH 04/15] chore: Use webrtc interface to build the voip module. --- lib/src/voip.dart | 136 ++++++++++++++++++++----------------- lib/src/voip_abstract.dart | 116 ------------------------------- pubspec.yaml | 1 + 3 files changed, 76 insertions(+), 177 deletions(-) delete mode 100644 lib/src/voip_abstract.dart diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 6d82ba5a..491c33dc 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -1,11 +1,57 @@ import 'dart:async'; import 'dart:core'; -import 'voip_abstract.dart'; +import 'package:webrtc_interface/webrtc_interface.dart'; import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; import '../matrix.dart'; +MediaDevices mediaDevices = Null as MediaDevices; +RTCFactory factory = Null as RTCFactory; + +class RTCVideoRenderer extends VideoRenderer { + RTCVideoRenderer() : super() { + muted = true; + } + + @override + late bool muted; + + @override + MediaStream? srcObject; + + @override + Future audioOutput(String deviceId) { + // TODO: implement audioOutput + throw UnimplementedError(); + } + + @override + Future initialize() { + // TODO: implement initialize + throw UnimplementedError(); + } + + @override + // TODO: implement renderVideo + bool get renderVideo => throw UnimplementedError(); + + @override + // TODO: implement textureId + int? get textureId => throw UnimplementedError(); + + @override + // TODO: implement videoHeight + int get videoHeight => throw UnimplementedError(); + + @override + // TODO: implement videoWidth + int get videoWidth => throw UnimplementedError(); + + @override + Future dispose() => throw UnimplementedError(); +} + /// The default life time for call events, in millisecond. const lifetimeMs = 10 * 1000; @@ -393,7 +439,7 @@ class CallSession { try { await pc!.setRemoteDescription(description); if (description.type == 'offer') { - final answer = await pc!.createAnswer(); + final answer = await pc!.createAnswer({}); await room.sendCallNegotiate( callId, lifetimeMs, localPartyId, answer.sdp!, type: answer.type!); @@ -555,7 +601,8 @@ class CallSession { if (purpose == SDPStreamMetadataPurpose.Usermedia) { speakerOn = type == CallType.kVideo; - if (!kIsWeb && !voip.background) { + //TODO: Confirm that the platform is not Web. 
+ if (/*!kIsWeb && */ !voip.background) { final audioTrack = stream.getAudioTracks()[0]; audioTrack.enableSpeakerphone(speakerOn); } @@ -658,13 +705,16 @@ class CallSession { speakerOn = !speakerOn; } + //TODO: move to the app. Future switchCamera() async { if (localUserMediaStream != null) { + /* await Helper.switchCamera( localUserMediaStream!.stream!.getVideoTracks()[0]); if (kIsMobile) { facingMode == 'user' ? facingMode = 'environment' : facingMode = 'user'; } + */ } } @@ -960,7 +1010,7 @@ class CallSession { : false, }; try { - return await navigator.mediaDevices.getUserMedia(mediaConstraints); + return await mediaDevices.getUserMedia(mediaConstraints); } catch (e) { _getUserMediaFailed(e); } @@ -973,7 +1023,7 @@ class CallSession { 'video': true, }; try { - return await navigator.mediaDevices.getDisplayMedia(mediaConstraints); + return await mediaDevices.getDisplayMedia(mediaConstraints); } catch (e) { _getUserMediaFailed(e); } @@ -985,7 +1035,7 @@ class CallSession { 'iceServers': opts.iceServers, 'sdpSemantics': 'unified-plan' }; - final pc = await createPeerConnection(configuration); + final pc = await factory.createPeerConnection(configuration); pc.onTrack = (RTCTrackEvent event) { if (event.streams.isNotEmpty) { final stream = event.streams[0]; @@ -1115,47 +1165,16 @@ class CallSession { } } -class VoIP with WidgetsBindingObserver { +class VoIP { TurnServerCredentials? _turnServerCredentials; Map calls = {}; String? currentCID; - //ConnectivityResult _currentConnectivity; - Function(CallSession session)? onIncomingCall; - OverlayEntry? overlayEntry; + Function(CallSession session)? onNewCall; + Function(CallSession session)? onCallEnded; String? get localPartyId => client.deviceID; bool background = false; Client client; - @override - void didChangeAppLifecycleState(AppLifecycleState state) { - Logs().v('AppLifecycleState = $state'); - background = !(state != AppLifecycleState.detached && - state != AppLifecycleState.paused); - } - - void addCallingOverlay( - BuildContext context, String callId, CallSession call) { - if (overlayEntry != null) { - Logs().w('[VOIP] addCallingOverlay: The call session already exists?'); - overlayEntry?.remove(); - } - /* TODO: - final overlay = Overlay.of(context); - overlayEntry = OverlayEntry( - builder: (_) => Calling( - context: famedly.context, - client: client, - callId: callId, - call: call, - onClear: () { - overlayEntry?.remove(); - overlayEntry = null; - }), - ); - overlay.insert(overlayEntry); - */ - } - VoIP(this.client) : super() { client.onCallInvite.stream.listen(onCallInvite); client.onCallAnswer.stream.listen(onCallAnswer); @@ -1168,19 +1187,19 @@ class VoIP with WidgetsBindingObserver { client.onSDPStreamMetadataChangedReceived.stream .listen(onSDPStreamMetadataChangedReceived); client.onAssertedIdentityReceived.stream.listen(onAssertedIdentityReceived); - /* - Connectivity().onConnectivityChanged.listen(_handleNetworkChanged); - Connectivity() - .checkConnectivity() - .then((result) => _currentConnectivity = result) - .catchError((e) => _currentConnectivity = ConnectivityResult.none); - */ - if (!kIsWeb) { - final wb = WidgetsBinding.instance; - wb!.addObserver(this); - didChangeAppLifecycleState(wb.lifecycleState!); - } + /* TODO: implement this in the fanedly-app. 
+ Connectivity().onConnectivityChanged.listen(_handleNetworkChanged); + Connectivity() + .checkConnectivity() + .then((result) => _currentConnectivity = result) + .catchError((e) => _currentConnectivity = ConnectivityResult.none); + if (!kIsWeb) { + final wb = WidgetsBinding.instance; + wb!.addObserver(this); + didChangeAppLifecycleState(wb.lifecycleState!); + } + */ } Future onCallInvite(Event event) async { @@ -1254,8 +1273,7 @@ class VoIP with WidgetsBindingObserver { .then((_) { // Popup CallingPage for incoming call. if (!background) { - //TODO: - //addCallingOverlay(famedly.context, callId, newCall); + onNewCall?.call(newCall); } }); currentCID = callId; @@ -1264,8 +1282,7 @@ class VoIP with WidgetsBindingObserver { /// Forced to enable signaling synchronization until the end of the call. client.backgroundSync = true; - /// Handle incoming call for callkeep plugin. - onIncomingCall?.call(newCall); + ///TODO: notify the callkeep that the call is incoming. } // Play ringtone playRingtone(); @@ -1352,9 +1369,7 @@ class VoIP with WidgetsBindingObserver { // hangup in any case, either if the other party hung up or we did on another device call.terminate(CallParty.kRemote, event.content['reason'] ?? CallErrorCode.UserHangup, true); - - overlayEntry?.remove(); - overlayEntry = null; + onCallEnded?.call(call); } else { Logs().v('[VOIP] onCallHangup: Session [$callId] not found!'); } @@ -1534,8 +1549,7 @@ class VoIP with WidgetsBindingObserver { currentCID = callId; await newCall.initOutboundCall(type).then((_) { if (!background) { - //TODO: - //addCallingOverlay(famdly.context, callId, newCall); + onNewCall?.call(newCall); } }); currentCID = callId; diff --git a/lib/src/voip_abstract.dart b/lib/src/voip_abstract.dart deleted file mode 100644 index 392e5d36..00000000 --- a/lib/src/voip_abstract.dart +++ /dev/null @@ -1,116 +0,0 @@ -import 'dart:io'; - -class MediaStreamTrack { - bool get enabled => false; - set enabled(bool value) {} - String get kind => throw UnimplementedError(); - Future stop() async {} - Future enableSpeakerphone(bool enable) async {} -} - -class MediaStream { - String get id => throw UnimplementedError(); - List getAudioTracks() => throw UnimplementedError(); - List getVideoTracks() => throw UnimplementedError(); - List getTracks() => throw UnimplementedError(); - Future dispose() async {} -} - -class RTCPeerConnection { - Function(RTCTrackEvent event)? onTrack; - Function()? onRenegotiationNeeded; - Function(RTCIceCandidate)? onIceCandidate; - Function(dynamic state)? onIceGatheringState; - Function(dynamic state)? 
onIceConnectionState; - - Future createOffer(Map constraints) { - throw UnimplementedError(); - } - - Future createAnswer(Map constraints) { - throw UnimplementedError(); - } - - Future setRemoteDescription(RTCSessionDescription description) async {} - Future setLocalDescription(RTCSessionDescription description) async {} - - Future addTrack( - MediaStreamTrack track, MediaStream stream) async { - return RTCRtpSender(); - } - - Future removeTrack(RTCRtpSender sender) async {} - - Future close() async {} - - Future dispose() async {} - - Future addIceCandidate(RTCIceCandidate candidate) async {} - - Future addStream(MediaStream stream) async {} - - Future removeStream(MediaStream stream) async {} - - Future> getTransceivers() async { - throw UnimplementedError(); - } - - Future> getSenders() async { - throw UnimplementedError(); - } - - Future addCandidate(RTCIceCandidate candidate) async {} - - dynamic get signalingState => throw UnimplementedError(); -} - -class RTCIceCandidate { - String get candidate => throw UnimplementedError(); - String get sdpMid => throw UnimplementedError(); - int get sdpMLineIndex => throw UnimplementedError(); - Map toMap() => throw UnimplementedError(); - RTCIceCandidate(String candidate, String sdpMid, int sdpMLineIndex); -} - -class RTCRtpSender { - MediaStreamTrack? get track => throw UnimplementedError(); - DtmfSender get dtmfSender => throw UnimplementedError(); -} - -class RTCSessionDescription { - late String type; - late String sdp; - RTCSessionDescription(this.sdp, this.type); -} - -class RTCTrackEvent { - late List streams; -} - -enum TransceiverDirection { - SendRecv, - SendOnly, - RecvOnly, - Inactive, -} - -enum RTCSignalingState { RTCSignalingStateStable } - -class RTCVideoRenderer {} - -const kIsWeb = false; - -bool get kIsMobile => !kIsWeb && (Platform.isAndroid || Platform.isIOS); - -class Helper { - static Future switchCamera(MediaStreamTrack track) async {} -} - -class DtmfSender { - Future insertDTMF(String tones) async {} -} - -Future createPeerConnection( - Map constraints) async { - throw UnimplementedError(); -} diff --git a/pubspec.yaml b/pubspec.yaml index 4d236c92..0af2b3d6 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -23,6 +23,7 @@ dependencies: slugify: ^2.0.0 html: ^0.15.0 collection: ^1.15.0 + webrtc_interface: ^1.0.0 sdp_transform: ^0.3.2 dev_dependencies: From 5dfb196c9018b3e7b03c5c5f51037f632c67ab60 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Fri, 19 Nov 2021 18:03:58 +0800 Subject: [PATCH 05/15] chore: dart format. --- lib/src/voip.dart | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 491c33dc..35a0c4c3 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -49,7 +49,7 @@ class RTCVideoRenderer extends VideoRenderer { int get videoWidth => throw UnimplementedError(); @override - Future dispose() => throw UnimplementedError(); + Future dispose() => throw UnimplementedError(); } /// The default life time for call events, in millisecond. @@ -1199,7 +1199,7 @@ class VoIP { wb!.addObserver(this); didChangeAppLifecycleState(wb.lifecycleState!); } - */ + */ } Future onCallInvite(Event event) async { From 12df5d8cdde524c8690feede1e26e669e6bdedb4 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 22 Nov 2021 22:41:49 +0800 Subject: [PATCH 06/15] chore: Use RTCFactory interface to create MediaStream, VideoRenderer. 
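
The VoIP module no longer relies on the package-level mediaDevices/factory
globals: an RTCFactory is passed into the VoIP constructor, and peer
connections, video renderers and the MediaDevices used for getUserMedia /
getDisplayMedia are all obtained from it. A minimal wiring sketch follows;
myRtcFactory stands for whatever webrtc_interface implementation the
application supplies, and client/roomId are assumed to already exist in the
app (none of these names are introduced by this patch):

    // Hypothetical app-side wiring; client, roomId and myRtcFactory are
    // assumptions of this sketch, not part of the patch itself.
    final voip = VoIP(client, myRtcFactory)
      ..onNewCall = (CallSession session) {
        // e.g. push the calling UI for this session
      }
      ..onCallEnded = (CallSession session) {
        // e.g. pop the calling UI again
      };
    await voip.inviteToCall(roomId, CallType.kVideo);
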
--- lib/src/voip.dart | 80 ++++++++++------------------------------------- 1 file changed, 17 insertions(+), 63 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 35a0c4c3..a4406d83 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -6,52 +6,6 @@ import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; import '../matrix.dart'; -MediaDevices mediaDevices = Null as MediaDevices; -RTCFactory factory = Null as RTCFactory; - -class RTCVideoRenderer extends VideoRenderer { - RTCVideoRenderer() : super() { - muted = true; - } - - @override - late bool muted; - - @override - MediaStream? srcObject; - - @override - Future audioOutput(String deviceId) { - // TODO: implement audioOutput - throw UnimplementedError(); - } - - @override - Future initialize() { - // TODO: implement initialize - throw UnimplementedError(); - } - - @override - // TODO: implement renderVideo - bool get renderVideo => throw UnimplementedError(); - - @override - // TODO: implement textureId - int? get textureId => throw UnimplementedError(); - - @override - // TODO: implement videoHeight - int get videoHeight => throw UnimplementedError(); - - @override - // TODO: implement videoWidth - int get videoWidth => throw UnimplementedError(); - - @override - Future dispose() => throw UnimplementedError(); -} - /// The default life time for call events, in millisecond. const lifetimeMs = 10 * 1000; @@ -72,13 +26,14 @@ class WrappedMediaStream { /// for debug String get title => '$displayName:$purpose:a[$audioMuted]:v[$videoMuted]'; - RTCVideoRenderer? renderer; + final VideoRenderer renderer; bool stopped = false; void Function(bool audioMuted, bool videoMuted)? onMuteStateChanged; void Function(MediaStream stream)? onNewStream; WrappedMediaStream( {this.stream, + required this.renderer, required this.room, required this.userId, required this.purpose, @@ -88,23 +43,17 @@ class WrappedMediaStream { /// Initialize the video renderer Future initialize() async { - if (renderer == null) { - renderer = RTCVideoRenderer(); - await renderer?.initialize(); - } - renderer?.srcObject = stream; - renderer?.onResize = () { + await renderer.initialize(); + renderer.srcObject = stream; + renderer.onResize = () { Logs().i( 'onResize [${stream!.id.substring(0, 8)}] ${renderer?.videoWidth} x ${renderer?.videoHeight}'); }; } Future dispose() async { - if (renderer != null) { - renderer?.srcObject = null; - await renderer?.dispose(); - renderer = null; - } + renderer.srcObject = null; + await renderer.dispose(); if (isLocal() && stream != null) { await stream?.dispose(); @@ -135,7 +84,7 @@ class WrappedMediaStream { void setNewStream(MediaStream newStream) { stream = newStream; - renderer?.srcObject = stream; + renderer.srcObject = stream; if (onNewStream != null) { onNewStream?.call(stream!); } @@ -571,6 +520,7 @@ class CallSession { existingStream.setNewStream(stream); } else { final newStream = WrappedMediaStream( + renderer: voip.factory.videoRenderer(), userId: client.userID!, room: opts.room, stream: stream, @@ -631,6 +581,7 @@ class CallSession { existingStream.setNewStream(stream); } else { final newStream = WrappedMediaStream( + renderer: voip.factory.videoRenderer(), userId: remoteUser.id, room: opts.room, stream: stream, @@ -1010,7 +961,8 @@ class CallSession { : false, }; try { - return await mediaDevices.getUserMedia(mediaConstraints); + return await voip.factory.navigator.mediaDevices + .getUserMedia(mediaConstraints); } catch (e) { _getUserMediaFailed(e); } @@ -1023,7 +975,8 @@ class CallSession { 
'video': true, }; try { - return await mediaDevices.getDisplayMedia(mediaConstraints); + return await voip.factory.navigator.mediaDevices + .getDisplayMedia(mediaConstraints); } catch (e) { _getUserMediaFailed(e); } @@ -1035,7 +988,7 @@ class CallSession { 'iceServers': opts.iceServers, 'sdpSemantics': 'unified-plan' }; - final pc = await factory.createPeerConnection(configuration); + final pc = await voip.factory.createPeerConnection(configuration); pc.onTrack = (RTCTrackEvent event) { if (event.streams.isNotEmpty) { final stream = event.streams[0]; @@ -1174,8 +1127,9 @@ class VoIP { String? get localPartyId => client.deviceID; bool background = false; Client client; + final RTCFactory factory; - VoIP(this.client) : super() { + VoIP(this.client, this.factory) : super() { client.onCallInvite.stream.listen(onCallInvite); client.onCallAnswer.stream.listen(onCallAnswer); client.onCallCandidates.stream.listen(onCallCandidates); From 18af7e0642be0cfedc68ca98e4f25a83e22c98f5 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 22 Nov 2021 23:08:52 +0800 Subject: [PATCH 07/15] chore: Set client to final. --- lib/src/voip.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index a4406d83..b4352049 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -1126,7 +1126,7 @@ class VoIP { Function(CallSession session)? onCallEnded; String? get localPartyId => client.deviceID; bool background = false; - Client client; + final Client client; final RTCFactory factory; VoIP(this.client, this.factory) : super() { From 72584d679cb022cc45b251303aabd909b1acdece Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Fri, 26 Nov 2021 20:19:37 +0800 Subject: [PATCH 08/15] chore: bump version for webrtc_interface. --- lib/matrix.dart | 2 ++ lib/src/voip.dart | 42 +++++++++++------------------------------- pubspec.yaml | 2 +- 3 files changed, 14 insertions(+), 32 deletions(-) diff --git a/lib/matrix.dart b/lib/matrix.dart index bbd34d5a..71c2671d 100644 --- a/lib/matrix.dart +++ b/lib/matrix.dart @@ -26,6 +26,8 @@ export 'src/database/database_api.dart'; export 'src/database/hive_database.dart'; export 'src/event.dart'; export 'src/event_status.dart'; +export 'src/voip.dart'; +export 'src/voip_content.dart'; export 'src/room.dart'; export 'src/timeline.dart'; export 'src/user.dart'; diff --git a/lib/src/voip.dart b/lib/src/voip.dart index b4352049..982890e8 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -26,14 +26,12 @@ class WrappedMediaStream { /// for debug String get title => '$displayName:$purpose:a[$audioMuted]:v[$videoMuted]'; - final VideoRenderer renderer; bool stopped = false; void Function(bool audioMuted, bool videoMuted)? onMuteStateChanged; void Function(MediaStream stream)? 
onNewStream; WrappedMediaStream( {this.stream, - required this.renderer, required this.room, required this.userId, required this.purpose, @@ -41,20 +39,7 @@ class WrappedMediaStream { required this.audioMuted, required this.videoMuted}); - /// Initialize the video renderer - Future initialize() async { - await renderer.initialize(); - renderer.srcObject = stream; - renderer.onResize = () { - Logs().i( - 'onResize [${stream!.id.substring(0, 8)}] ${renderer?.videoWidth} x ${renderer?.videoHeight}'); - }; - } - Future dispose() async { - renderer.srcObject = null; - await renderer.dispose(); - if (isLocal() && stream != null) { await stream?.dispose(); stream = null; @@ -84,7 +69,6 @@ class WrappedMediaStream { void setNewStream(MediaStream newStream) { stream = newStream; - renderer.srcObject = stream; if (onNewStream != null) { onNewStream?.call(stream!); } @@ -232,7 +216,7 @@ enum CallEvent { enum CallType { kVoice, kVideo } -enum Direction { kIncoming, kOutgoing } +enum CallDirection { kIncoming, kOutgoing } enum CallParty { kLocal, kRemote } @@ -240,7 +224,7 @@ enum CallParty { kLocal, kRemote } class CallOptions { late String callId; late CallType type; - late Direction dir; + late CallDirection dir; late String localPartyId; late VoIP voip; late Room room; @@ -257,9 +241,9 @@ class CallSession { String get callId => opts.callId; String get localPartyId => opts.localPartyId; String? get displayName => room.displayname; - Direction get direction => opts.dir; + CallDirection get direction => opts.dir; CallState state = CallState.kFledgling; - bool get isOutgoing => direction == Direction.kOutgoing; + bool get isOutgoing => direction == CallDirection.kOutgoing; bool get isRinging => state == CallState.kRinging; RTCPeerConnection? pc; List remoteCandidates = []; @@ -356,7 +340,7 @@ class CallSession { _updateRemoteSDPStreamMetadata(metadata); } - if (direction == Direction.kOutgoing) { + if (direction == CallDirection.kOutgoing) { setCallState(CallState.kConnecting); await pc!.setRemoteDescription(answer); remoteCandidates.forEach((candidate) => pc!.addCandidate(candidate)); @@ -365,7 +349,7 @@ class CallSession { void onNegotiateReceived( SDPStreamMetadata? 
metadata, RTCSessionDescription description) async { - final polite = direction == Direction.kIncoming; + final polite = direction == CallDirection.kIncoming; // Here we follow the perfect negotiation logic from // https://developer.mozilla.org/en-US/docs/Web/API/WebRTC_API/Perfect_negotiation @@ -520,7 +504,6 @@ class CallSession { existingStream.setNewStream(stream); } else { final newStream = WrappedMediaStream( - renderer: voip.factory.videoRenderer(), userId: client.userID!, room: opts.room, stream: stream, @@ -529,7 +512,6 @@ class CallSession { audioMuted: stream.getAudioTracks().isEmpty, videoMuted: stream.getVideoTracks().isEmpty, ); - await newStream.initialize(); streams[stream.id] = newStream; emit(CallEvent.kFeedsChanged, streams); } @@ -581,7 +563,6 @@ class CallSession { existingStream.setNewStream(stream); } else { final newStream = WrappedMediaStream( - renderer: voip.factory.videoRenderer(), userId: remoteUser.id, room: opts.room, stream: stream, @@ -590,7 +571,6 @@ class CallSession { audioMuted: audioMuted, videoMuted: videoMuted, ); - await newStream.initialize(); streams[stream.id] = newStream; } emit(CallEvent.kFeedsChanged, streams); @@ -676,7 +656,7 @@ class CallSession { // stop play ringtone voip.stopRingTone(); - if (direction == Direction.kIncoming) { + if (direction == CallDirection.kIncoming) { setCallState(CallState.kCreateAnswer); final answer = await pc!.createAnswer({}); @@ -773,7 +753,7 @@ class CallSession { // No need to check party_id for reject because if we'd received either // an answer or reject, we wouldn't be in state InviteSent final shouldTerminate = - (state == CallState.kFledgling && direction == Direction.kIncoming) || + (state == CallState.kFledgling && direction == CallDirection.kIncoming) || CallState.kInviteSent == state || CallState.kRinging == state; @@ -1099,7 +1079,7 @@ class CallSession { } void onSelectAnswerReceived(String? selectedPartyId) { - if (direction != Direction.kIncoming) { + if (direction != CallDirection.kIncoming) { Logs().w('Got select_answer for an outbound call: ignoring'); return; } @@ -1209,7 +1189,7 @@ class VoIP { final opts = CallOptions() ..voip = this ..callId = callId - ..dir = Direction.kIncoming + ..dir = CallDirection.kIncoming ..type = callType ..room = event.room ..localPartyId = localPartyId! @@ -1493,7 +1473,7 @@ class VoIP { final opts = CallOptions() ..callId = callId ..type = type - ..dir = Direction.kOutgoing + ..dir = CallDirection.kOutgoing ..room = room ..voip = this ..localPartyId = localPartyId! diff --git a/pubspec.yaml b/pubspec.yaml index 0af2b3d6..846db442 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -23,7 +23,7 @@ dependencies: slugify: ^2.0.0 html: ^0.15.0 collection: ^1.15.0 - webrtc_interface: ^1.0.0 + webrtc_interface: ^1.0.1 sdp_transform: ^0.3.2 dev_dependencies: From 60618d1775e3385640c87ee37bd632821543bff6 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Sat, 27 Nov 2021 01:20:04 +0800 Subject: [PATCH 09/15] chore: Add WebRTC delegate interface. --- lib/src/voip.dart | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 982890e8..b136fc17 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -6,6 +6,11 @@ import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; import '../matrix.dart'; +abstract class WebRTCDelegate { + RTCFactory get rtcFactory; + VideoRenderer createRenderer(); +} + /// The default life time for call events, in millisecond. 
const lifetimeMs = 10 * 1000; @@ -23,6 +28,7 @@ class WrappedMediaStream { bool audioMuted; bool videoMuted; final Client client; + VideoRenderer renderer; /// for debug String get title => '$displayName:$purpose:a[$audioMuted]:v[$videoMuted]'; @@ -32,6 +38,7 @@ class WrappedMediaStream { WrappedMediaStream( {this.stream, + required this.renderer, required this.room, required this.userId, required this.purpose, @@ -39,7 +46,18 @@ class WrappedMediaStream { required this.audioMuted, required this.videoMuted}); + /// Initialize the video renderer + Future initialize() async { + await renderer.initialize(); + renderer.srcObject = stream; + renderer.onResize = () { + Logs().i( + 'onResize [${stream!.id.substring(0, 8)}] ${renderer?.videoWidth} x ${renderer?.videoHeight}'); + }; + } + Future dispose() async { + renderer.srcObject = null; if (isLocal() && stream != null) { await stream?.dispose(); stream = null; @@ -69,6 +87,7 @@ class WrappedMediaStream { void setNewStream(MediaStream newStream) { stream = newStream; + renderer.srcObject = stream; if (onNewStream != null) { onNewStream?.call(stream!); } @@ -504,6 +523,7 @@ class CallSession { existingStream.setNewStream(stream); } else { final newStream = WrappedMediaStream( + renderer: voip.delegate.createRenderer(), userId: client.userID!, room: opts.room, stream: stream, @@ -563,6 +583,7 @@ class CallSession { existingStream.setNewStream(stream); } else { final newStream = WrappedMediaStream( + renderer: voip.delegate.createRenderer(), userId: remoteUser.id, room: opts.room, stream: stream, @@ -752,10 +773,10 @@ class CallSession { Logs().v('[VOIP] Reject received for call ID ' + callId); // No need to check party_id for reject because if we'd received either // an answer or reject, we wouldn't be in state InviteSent - final shouldTerminate = - (state == CallState.kFledgling && direction == CallDirection.kIncoming) || - CallState.kInviteSent == state || - CallState.kRinging == state; + final shouldTerminate = (state == CallState.kFledgling && + direction == CallDirection.kIncoming) || + CallState.kInviteSent == state || + CallState.kRinging == state; if (shouldTerminate) { terminate(CallParty.kRemote, reason ?? CallErrorCode.UserHangup, true); @@ -1107,9 +1128,10 @@ class VoIP { String? get localPartyId => client.deviceID; bool background = false; final Client client; - final RTCFactory factory; + RTCFactory get factory => delegate.rtcFactory; + final WebRTCDelegate delegate; - VoIP(this.client, this.factory) : super() { + VoIP(this.client, this.delegate) : super() { client.onCallInvite.stream.listen(onCallInvite); client.onCallAnswer.stream.listen(onCallAnswer); client.onCallCandidates.stream.listen(onCallCandidates); From 01276bbf605f02e0cb92958673c4a99264bc692d Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Sat, 27 Nov 2021 01:29:11 +0800 Subject: [PATCH 10/15] chore: add more interface for delegate. --- lib/src/voip.dart | 71 +++++++++++++---------------------------------- 1 file changed, 19 insertions(+), 52 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index b136fc17..cc336e42 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -8,7 +8,13 @@ import '../matrix.dart'; abstract class WebRTCDelegate { RTCFactory get rtcFactory; + bool get isBackgroud; + bool get isWeb; VideoRenderer createRenderer(); + void playRingtone(); + void stopRingtone(); + Function(CallSession session)? onNewCall; + Function(CallSession session)? onCallEnded; } /// The default life time for call events, in millisecond. 
@@ -553,8 +559,7 @@ class CallSession { if (purpose == SDPStreamMetadataPurpose.Usermedia) { speakerOn = type == CallType.kVideo; - //TODO: Confirm that the platform is not Web. - if (/*!kIsWeb && */ !voip.background) { + if (voip.delegate.isWeb && !voip.delegate.isBackgroud) { final audioTrack = stream.getAudioTracks()[0]; audioTrack.enableSpeakerphone(speakerOn); } @@ -653,29 +658,12 @@ class CallSession { return callOnHold; } - void setSpeakerOn() { - speakerOn = !speakerOn; - } - - //TODO: move to the app. - Future switchCamera() async { - if (localUserMediaStream != null) { - /* - await Helper.switchCamera( - localUserMediaStream!.stream!.getVideoTracks()[0]); - if (kIsMobile) { - facingMode == 'user' ? facingMode = 'environment' : facingMode = 'user'; - } - */ - } - } - void answer() async { if (inviteOrAnswerSent) { return; } // stop play ringtone - voip.stopRingTone(); + voip.delegate.stopRingtone(); if (direction == CallDirection.kIncoming) { setCallState(CallState.kCreateAnswer); @@ -722,7 +710,7 @@ class CallSession { void hangup([String? reason, bool suppressEvent = true]) async { // stop play ringtone - voip.stopRingTone(); + voip.delegate.stopRingtone(); terminate( CallParty.kLocal, reason ?? CallErrorCode.UserHangup, !suppressEvent); @@ -1123,10 +1111,7 @@ class VoIP { TurnServerCredentials? _turnServerCredentials; Map calls = {}; String? currentCID; - Function(CallSession session)? onNewCall; - Function(CallSession session)? onCallEnded; String? get localPartyId => client.deviceID; - bool background = false; final Client client; RTCFactory get factory => delegate.rtcFactory; final WebRTCDelegate delegate; @@ -1228,38 +1213,20 @@ class VoIP { .initWithInvite(callType, offer, sdpStreamMetadata, lifetime) .then((_) { // Popup CallingPage for incoming call. - if (!background) { - onNewCall?.call(newCall); + if (!delegate.isBackgroud) { + delegate.onNewCall?.call(newCall); } }); currentCID = callId; - if (background) { + if (delegate.isBackgroud) { /// Forced to enable signaling synchronization until the end of the call. client.backgroundSync = true; ///TODO: notify the callkeep that the call is incoming. } // Play ringtone - playRingtone(); - } - - void playRingtone() async { - if (!background) { - try { - // TODO: callback the event to the user. - // await UserMediaManager().startRinginTone(); - } catch (_) {} - } - } - - void stopRingTone() async { - if (!background) { - try { - // TODO: - // await UserMediaManager().stopRingingTone(); - } catch (_) {} - } + delegate.playRingtone(); } void onCallAnswer(Event event) async { @@ -1272,7 +1239,7 @@ class VoIP { if (event.senderId == client.userID) { // Ignore messages to yourself. if (!call._answeredByUs) { - stopRingTone(); + delegate.stopRingtone(); } return; } @@ -1315,8 +1282,8 @@ class VoIP { void onCallHangup(Event event) async { // stop play ringtone, if this is an incoming call - if (!background) { - stopRingTone(); + if (!delegate.isBackgroud) { + delegate.stopRingtone(); } Logs().v('[VOIP] onCallHangup => ${event.content.toString()}'); final String callId = event.content['call_id']; @@ -1325,7 +1292,7 @@ class VoIP { // hangup in any case, either if the other party hung up or we did on another device call.terminate(CallParty.kRemote, event.content['reason'] ?? 
CallErrorCode.UserHangup, true); - onCallEnded?.call(call); + delegate.onCallEnded?.call(call); } else { Logs().v('[VOIP] onCallHangup: Session [$callId] not found!'); } @@ -1504,8 +1471,8 @@ class VoIP { final newCall = createNewCall(opts); currentCID = callId; await newCall.initOutboundCall(type).then((_) { - if (!background) { - onNewCall?.call(newCall); + if (!delegate.isBackgroud) { + delegate.onNewCall?.call(newCall); } }); currentCID = callId; From edeea47decbf01684aa29d97efce851822656050 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Mon, 29 Nov 2021 22:26:18 +0800 Subject: [PATCH 11/15] chore: Improve the code. --- lib/src/voip.dart | 59 +++++++++++++++-------------------------------- 1 file changed, 19 insertions(+), 40 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index cc336e42..15eeeda5 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -6,15 +6,20 @@ import 'package:sdp_transform/sdp_transform.dart' as sdp_transform; import '../matrix.dart'; +/// Delegate WebRTC basic functionality. abstract class WebRTCDelegate { - RTCFactory get rtcFactory; - bool get isBackgroud; - bool get isWeb; + MediaDevices get mediaDevices; + Future createPeerConnection( + Map configuration, + [Map constraints]); VideoRenderer createRenderer(); void playRingtone(); void stopRingtone(); - Function(CallSession session)? onNewCall; - Function(CallSession session)? onCallEnded; + void handleNewCall(CallSession session); + void handleCallEnded(CallSession session); + + bool get isBackgroud; + bool get isWeb; } /// The default life time for call events, in millisecond. @@ -58,7 +63,7 @@ class WrappedMediaStream { renderer.srcObject = stream; renderer.onResize = () { Logs().i( - 'onResize [${stream!.id.substring(0, 8)}] ${renderer?.videoWidth} x ${renderer?.videoHeight}'); + 'onResize [${stream!.id.substring(0, 8)}] ${renderer.videoWidth} x ${renderer.videoHeight}'); }; } @@ -467,7 +472,7 @@ class CallSession { if (pc != null && pc!.iceConnectionState == RTCIceConnectionState.RTCIceConnectionStateDisconnected) { - _restartIce(); + restartIce(); } } @@ -924,7 +929,7 @@ class CallSession { return metadata; } - void _restartIce() async { + void restartIce() async { Logs().v('[VOIP] iceRestart.'); // Needs restart ice on session.pc and renegotiation. iceGatheringFinished = false; @@ -950,8 +955,7 @@ class CallSession { : false, }; try { - return await voip.factory.navigator.mediaDevices - .getUserMedia(mediaConstraints); + return await voip.delegate.mediaDevices.getUserMedia(mediaConstraints); } catch (e) { _getUserMediaFailed(e); } @@ -964,8 +968,7 @@ class CallSession { 'video': true, }; try { - return await voip.factory.navigator.mediaDevices - .getDisplayMedia(mediaConstraints); + return await voip.delegate.mediaDevices.getDisplayMedia(mediaConstraints); } catch (e) { _getUserMediaFailed(e); } @@ -977,7 +980,7 @@ class CallSession { 'iceServers': opts.iceServers, 'sdpSemantics': 'unified-plan' }; - final pc = await voip.factory.createPeerConnection(configuration); + final pc = await voip.delegate.createPeerConnection(configuration); pc.onTrack = (RTCTrackEvent event) { if (event.streams.isNotEmpty) { final stream = event.streams[0]; @@ -1113,7 +1116,6 @@ class VoIP { String? currentCID; String? 
get localPartyId => client.deviceID; final Client client; - RTCFactory get factory => delegate.rtcFactory; final WebRTCDelegate delegate; VoIP(this.client, this.delegate) : super() { @@ -1128,19 +1130,6 @@ class VoIP { client.onSDPStreamMetadataChangedReceived.stream .listen(onSDPStreamMetadataChangedReceived); client.onAssertedIdentityReceived.stream.listen(onAssertedIdentityReceived); - - /* TODO: implement this in the fanedly-app. - Connectivity().onConnectivityChanged.listen(_handleNetworkChanged); - Connectivity() - .checkConnectivity() - .then((result) => _currentConnectivity = result) - .catchError((e) => _currentConnectivity = ConnectivityResult.none); - if (!kIsWeb) { - final wb = WidgetsBinding.instance; - wb!.addObserver(this); - didChangeAppLifecycleState(wb.lifecycleState!); - } - */ } Future onCallInvite(Event event) async { @@ -1214,7 +1203,7 @@ class VoIP { .then((_) { // Popup CallingPage for incoming call. if (!delegate.isBackgroud) { - delegate.onNewCall?.call(newCall); + delegate.handleNewCall(newCall); } }); currentCID = callId; @@ -1292,7 +1281,7 @@ class VoIP { // hangup in any case, either if the other party hung up or we did on another device call.terminate(CallParty.kRemote, event.content['reason'] ?? CallErrorCode.UserHangup, true); - delegate.onCallEnded?.call(call); + delegate.handleCallEnded(call); } else { Logs().v('[VOIP] onCallHangup: Session [$callId] not found!'); } @@ -1441,16 +1430,6 @@ class VoIP { } ]; } - /* - void _handleNetworkChanged(ConnectivityResult result) async { - // Got a new connectivity status! - if (_currentConnectivity != result) { - calls.forEach((_, sess) { - sess._restartIce(); - }); - } - _currentConnectivity = result; - }*/ Future inviteToCall(String roomId, CallType type) async { final room = client.getRoomById(roomId); @@ -1472,7 +1451,7 @@ class VoIP { currentCID = callId; await newCall.initOutboundCall(type).then((_) { if (!delegate.isBackgroud) { - delegate.onNewCall?.call(newCall); + delegate.handleNewCall(newCall); } }); currentCID = callId; From 1f519703a8d5b2a29936c568273935a9372096d7 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Wed, 1 Dec 2021 00:47:11 +0800 Subject: [PATCH 12/15] fix: Fix the call function in flutter and dart. 
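
The WebRTC side is now fully described by the WebRTCDelegate (MediaDevices,
peer-connection creation, renderer creation, ringtone and call-lifecycle
hooks, plus the isWeb/isBackgroud flags), so the same voip.dart can be driven
from a Flutter app or from plain Dart. A rough, hypothetical sketch of a
flutter_webrtc-backed delegate is shown below; the class name, the
package:matrix import path and the no-op hooks are illustrative only, and it
assumes a flutter_webrtc release that implements webrtc_interface:

    import 'package:flutter/foundation.dart' show kIsWeb;
    import 'package:flutter_webrtc/flutter_webrtc.dart' as webrtc;
    import 'package:matrix/matrix.dart';
    import 'package:webrtc_interface/webrtc_interface.dart';

    // Hypothetical app-side delegate: forwards WebRTC calls to the
    // flutter_webrtc plugin and leaves UI/ringtone hooks to the app.
    class FlutterWebRTCDelegate implements WebRTCDelegate {
      @override
      MediaDevices get mediaDevices => webrtc.navigator.mediaDevices;

      @override
      Future<RTCPeerConnection> createPeerConnection(
              Map<String, dynamic> configuration,
              [Map<String, dynamic> constraints = const {}]) =>
          webrtc.createPeerConnection(configuration, constraints);

      @override
      VideoRenderer createRenderer() => webrtc.RTCVideoRenderer();

      @override
      bool get isWeb => kIsWeb;

      @override
      bool get isBackgroud => false; // wire this to the app lifecycle

      @override
      void playRingtone() {} // start the incoming-call ringtone here

      @override
      void stopRingtone() {} // stop it again

      @override
      void handleNewCall(CallSession session) {} // show the calling screen

      @override
      void handleCallEnded(CallSession session) {} // and remove it
    }
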
--- lib/src/voip.dart | 130 ++++++++++++++++++++++++++++++---------------- 1 file changed, 84 insertions(+), 46 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 15eeeda5..b9a9292c 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -11,7 +11,7 @@ abstract class WebRTCDelegate { MediaDevices get mediaDevices; Future createPeerConnection( Map configuration, - [Map constraints]); + [Map constraints = const {}]); VideoRenderer createRenderer(); void playRingtone(); void stopRingtone(); @@ -40,6 +40,7 @@ class WrappedMediaStream { bool videoMuted; final Client client; VideoRenderer renderer; + final bool isWeb; /// for debug String get title => '$displayName:$purpose:a[$audioMuted]:v[$videoMuted]'; @@ -55,7 +56,8 @@ class WrappedMediaStream { required this.purpose, required this.client, required this.audioMuted, - required this.videoMuted}); + required this.videoMuted, + required this.isWeb}); /// Initialize the video renderer Future initialize() async { @@ -70,6 +72,11 @@ class WrappedMediaStream { Future dispose() async { renderer.srcObject = null; if (isLocal() && stream != null) { + if (isWeb) { + stream!.getTracks().forEach((element) { + element.stop(); + }); + } await stream?.dispose(); stream = null; } @@ -289,7 +296,7 @@ class CallSession { bool makingOffer = false; bool ignoreOffer = false; String facingMode = 'user'; - late Client client; + Client get client => opts.room.client; String? remotePartyId; late User remoteUser; late CallParty hangupParty; @@ -298,25 +305,48 @@ class CallSession { SDPStreamMetadata? remoteSDPStreamMetadata; List usermediaSenders = []; List screensharingSenders = []; - Map streams = {}; + List streams = []; List get getLocalStreams => - streams.values.where((element) => element.isLocal()).toList(); + streams.where((element) => element.isLocal()).toList(); List get getRemoteStreams => - streams.values.where((element) => !element.isLocal()).toList(); - WrappedMediaStream? get localUserMediaStream => getLocalStreams.firstWhere( - (element) => element.purpose == SDPStreamMetadataPurpose.Usermedia, - orElse: () => Null as WrappedMediaStream); - WrappedMediaStream? get localScreenSharingStream => - getLocalStreams.firstWhere( - (element) => element.purpose == SDPStreamMetadataPurpose.Screenshare, - orElse: () => Null as WrappedMediaStream); - WrappedMediaStream? get remoteUserMediaStream => getRemoteStreams.firstWhere( - (element) => element.purpose == SDPStreamMetadataPurpose.Usermedia, - orElse: () => Null as WrappedMediaStream); - WrappedMediaStream? get remoteScreenSharingStream => - getRemoteStreams.firstWhere( - (element) => element.purpose == SDPStreamMetadataPurpose.Screenshare, - orElse: () => Null as WrappedMediaStream); + streams.where((element) => !element.isLocal()).toList(); + + WrappedMediaStream? get localUserMediaStream { + final stream = getLocalStreams.where( + (element) => element.purpose == SDPStreamMetadataPurpose.Usermedia); + if (stream.isNotEmpty) { + return stream.first; + } + return null; + } + + WrappedMediaStream? get localScreenSharingStream { + final stream = getLocalStreams.where( + (element) => element.purpose == SDPStreamMetadataPurpose.Screenshare); + if (stream.isNotEmpty) { + return stream.first; + } + return null; + } + + WrappedMediaStream? get remoteUserMediaStream { + final stream = getRemoteStreams.where( + (element) => element.purpose == SDPStreamMetadataPurpose.Usermedia); + if (stream.isNotEmpty) { + return stream.first; + } + return null; + } + + WrappedMediaStream? 
get remoteScreenSharingStream { + final stream = getRemoteStreams.where( + (element) => element.purpose == SDPStreamMetadataPurpose.Screenshare); + if (stream.isNotEmpty) { + return stream.first; + } + return null; + } + final _callStateController = StreamController.broadcast(sync: true); Stream get onCallStateChanged => _callStateController.stream; @@ -520,6 +550,7 @@ class CallSession { await track.stop(); } localScreenSharingStream!.stopped = true; + _removeStream(localScreenSharingStream!.stream!); emit(CallEvent.kFeedsChanged, streams); return false; } @@ -527,11 +558,10 @@ class CallSession { void _addLocalStream(MediaStream stream, String purpose, {bool addToPeerConnection = true}) async { - final WrappedMediaStream? existingStream = getLocalStreams.firstWhere( - (element) => element.purpose == purpose, - orElse: () => Null as WrappedMediaStream); - if (existingStream != null) { - existingStream.setNewStream(stream); + final existingStream = + getLocalStreams.where((element) => element.purpose == purpose); + if (existingStream.isNotEmpty) { + existingStream.first.setNewStream(stream); } else { final newStream = WrappedMediaStream( renderer: voip.delegate.createRenderer(), @@ -542,8 +572,10 @@ class CallSession { client: client, audioMuted: stream.getAudioTracks().isEmpty, videoMuted: stream.getVideoTracks().isEmpty, + isWeb: voip.delegate.isWeb, ); - streams[stream.id] = newStream; + await newStream.initialize(); + streams.add(newStream); emit(CallEvent.kFeedsChanged, streams); } @@ -564,7 +596,7 @@ class CallSession { if (purpose == SDPStreamMetadataPurpose.Usermedia) { speakerOn = type == CallType.kVideo; - if (voip.delegate.isWeb && !voip.delegate.isBackgroud) { + if (!voip.delegate.isWeb && !voip.delegate.isBackgroud) { final audioTrack = stream.getAudioTracks()[0]; audioTrack.enableSpeakerphone(speakerOn); } @@ -586,11 +618,10 @@ class CallSession { // Try to find a feed with the same purpose as the new stream, // if we find it replace the old stream with the new one - final WrappedMediaStream? 
existingStream = getRemoteStreams.firstWhere( - (element) => element.purpose == purpose, - orElse: () => Null as WrappedMediaStream); - if (existingStream != null) { - existingStream.setNewStream(stream); + final existingStream = + getRemoteStreams.where((element) => element.purpose == purpose); + if (existingStream.isNotEmpty) { + existingStream.first.setNewStream(stream); } else { final newStream = WrappedMediaStream( renderer: voip.delegate.createRenderer(), @@ -601,8 +632,10 @@ class CallSession { client: client, audioMuted: audioMuted, videoMuted: videoMuted, + isWeb: voip.delegate.isWeb, ); - streams[stream.id] = newStream; + await newStream.initialize(); + streams.add(newStream); } emit(CallEvent.kFeedsChanged, streams); Logs().i('Pushed remote stream (id="${stream.id}", purpose=$purpose)'); @@ -756,7 +789,8 @@ class CallSession { setCallState(CallState.kEnded); voip.currentCID = null; voip.calls.remove(callId); - + cleanUp(); + voip.delegate.handleCallEnded(this); if (shouldEmit) { emit(CallEvent.kHangup, this); } @@ -883,7 +917,7 @@ class CallSession { } void cleanUp() async { - streams.forEach((id, stream) { + streams.forEach((stream) { stream.dispose(); }); streams.clear(); @@ -992,25 +1026,30 @@ class CallSession { void tryRemoveStopedStreams() { final removedStreams = {}; - streams.forEach((id, stream) { + streams.forEach((stream) { if (stream.stopped) { - removedStreams[id] = stream; + removedStreams[stream.stream!.id] = stream; } }); - streams.removeWhere((id, stream) => removedStreams.containsKey(id)); + streams + .removeWhere((stream) => removedStreams.containsKey(stream.stream!.id)); removedStreams.forEach((id, element) { - _removeStream(id); + _removeStream(element.stream!); }); } - Future _removeStream(String streamId) async { - Logs().v('Removing feed with stream id $streamId'); - final removedStream = streams.remove(streamId); - if (removedStream == null) { - Logs().v('Didn\'t find the feed with stream id $streamId to delete'); + Future _removeStream(MediaStream stream) async { + Logs().v('Removing feed with stream id ${stream.id}'); + + final it = streams.where((element) => element.stream!.id == stream.id); + if (it.isEmpty) { + Logs().v('Didn\'t find the feed with stream id ${stream.id} to delete'); return; } - await removedStream.dispose(); + final wpstream = it.first; + streams.removeWhere((element) => element.stream!.id == stream.id); + emit(CallEvent.kFeedsChanged, streams); + await wpstream.dispose(); } Map _getOfferAnswerConstraints({bool iceRestart = false}) { @@ -1281,7 +1320,6 @@ class VoIP { // hangup in any case, either if the other party hung up or we did on another device call.terminate(CallParty.kRemote, event.content['reason'] ?? CallErrorCode.UserHangup, true); - delegate.handleCallEnded(call); } else { Logs().v('[VOIP] onCallHangup: Session [$callId] not found!'); } From 9bc35a216a28e3c107e8d501da97205ebe705e02 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Thu, 2 Dec 2021 23:04:33 +0800 Subject: [PATCH 13/15] chore: Use `fireCallEvent` instead of emit. --- lib/src/voip.dart | 61 +++++++++++++++++++---------------------------- 1 file changed, 25 insertions(+), 36 deletions(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index b9a9292c..3d0d5b14 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -301,6 +301,7 @@ class CallSession { late User remoteUser; late CallParty hangupParty; late String hangupReason; + late CallError lastError; SDPStreamMetadata? 
remoteSDPStreamMetadata; List usermediaSenders = []; @@ -383,7 +384,7 @@ class CallSession { Logs().v('[VOIP] Call invite has expired. Hanging up.'); hangupParty = CallParty.kRemote; // effectively setCallState(CallState.kEnded); - emit(CallEvent.kHangup); + fireCallEvent(CallEvent.kHangup); } ringingTimer?.cancel(); ringingTimer = null; @@ -447,7 +448,7 @@ class CallSession { final newLocalOnHold = await isLocalOnHold(); if (prevLocalOnHold != newLocalOnHold) { localHold = newLocalOnHold; - emit(CallEvent.kLocalHoldUnhold, newLocalOnHold); + fireCallEvent(CallEvent.kLocalHoldUnhold); } } @@ -470,14 +471,14 @@ class CallSession { } else { Logs().i('Not found purpose for remote stream $streamId, remove it?'); wpstream.stopped = true; - emit(CallEvent.kFeedsChanged, streams); + fireCallEvent(CallEvent.kFeedsChanged); } }); } void onSDPStreamMetadataReceived(SDPStreamMetadata metadata) async { _updateRemoteSDPStreamMetadata(metadata); - emit(CallEvent.kFeedsChanged, streams); + fireCallEvent(CallEvent.kFeedsChanged); } void onCandidatesReceived(List candidates) { @@ -508,7 +509,7 @@ class CallSession { void onAssertedIdentityReceived(AssertedIdentity identity) async { remoteAssertedIdentity = identity; - emit(CallEvent.kAssertedIdentityChanged); + fireCallEvent(CallEvent.kAssertedIdentityChanged); } bool get screensharingEnabled => localScreenSharingStream != null; @@ -536,10 +537,9 @@ class CallSession { _addLocalStream(stream, SDPStreamMetadataPurpose.Screenshare); return true; } catch (err) { - emit( - CallEvent.kError, - CallError(CallErrorCode.NoUserMedia, - 'Failed to get screen-sharing stream: ', err)); + fireCallEvent(CallEvent.kError); + lastError = CallError(CallErrorCode.NoUserMedia, + 'Failed to get screen-sharing stream: ', err); return false; } } else { @@ -550,8 +550,8 @@ class CallSession { await track.stop(); } localScreenSharingStream!.stopped = true; - _removeStream(localScreenSharingStream!.stream!); - emit(CallEvent.kFeedsChanged, streams); + await _removeStream(localScreenSharingStream!.stream!); + fireCallEvent(CallEvent.kFeedsChanged); return false; } } @@ -576,7 +576,7 @@ class CallSession { ); await newStream.initialize(); streams.add(newStream); - emit(CallEvent.kFeedsChanged, streams); + fireCallEvent(CallEvent.kFeedsChanged); } if (addToPeerConnection) { @@ -591,7 +591,7 @@ class CallSession { usermediaSenders.add(await pc!.addTrack(track, stream)); }); } - emit(CallEvent.kFeedsChanged, streams); + fireCallEvent(CallEvent.kFeedsChanged); } if (purpose == SDPStreamMetadataPurpose.Usermedia) { @@ -637,15 +637,13 @@ class CallSession { await newStream.initialize(); streams.add(newStream); } - emit(CallEvent.kFeedsChanged, streams); + fireCallEvent(CallEvent.kFeedsChanged); Logs().i('Pushed remote stream (id="${stream.id}", purpose=$purpose)'); } void setCallState(CallState newState) { - final oldState = state; - state = newState; _callStateController.add(newState); - emit(CallEvent.kState, state, oldState); + fireCallEvent(CallEvent.kState); } void setLocalVideoMuted(bool muted) { @@ -672,7 +670,7 @@ class CallSession { : TransceiverDirection.SendRecv); } _updateMuteStatus(); - emit(CallEvent.kRemoteHoldUnhold, remoteOnHold); + fireCallEvent(CallEvent.kRemoteHoldUnhold); } bool get isRemoteOnHold => remoteOnHold; @@ -792,7 +790,7 @@ class CallSession { cleanUp(); voip.delegate.handleCallEnded(this); if (shouldEmit) { - emit(CallEvent.kHangup, this); + fireCallEvent(CallEvent.kHangup); } } @@ -1048,7 +1046,7 @@ class CallSession { } final wpstream = it.first; 
streams.removeWhere((element) => element.stream!.id == stream.id); - emit(CallEvent.kFeedsChanged, streams); + fireCallEvent(CallEvent.kFeedsChanged); await wpstream.dispose(); } @@ -1078,7 +1076,7 @@ class CallSession { } } - void emit(CallEvent event, [dynamic arg1, dynamic arg2, dynamic arg3]) { + void fireCallEvent(CallEvent event) { _callEventController.add(event); Logs().i('CallEvent: ${event.toString()}'); switch (event) { @@ -1104,28 +1102,19 @@ class CallSession { void _getLocalOfferFailed(dynamic err) { Logs().e('Failed to get local offer ${err.toString()}'); - - emit( - CallEvent.kError, - CallError( - CallErrorCode.LocalOfferFailed, - 'Failed to get local offer!', - err, - ), - ); + fireCallEvent(CallEvent.kError); + lastError = CallError( + CallErrorCode.LocalOfferFailed, 'Failed to get local offer!', err); terminate(CallParty.kLocal, CallErrorCode.LocalOfferFailed, false); } void _getUserMediaFailed(dynamic err) { Logs().w('Failed to get user media - ending call ${err.toString()}'); - emit( - CallEvent.kError, - CallError( + fireCallEvent(CallEvent.kError); + lastError = CallError( CallErrorCode.NoUserMedia, 'Couldn\'t start capturing media! Is your microphone set up and does this app have permission?', - err, - ), - ); + err); terminate(CallParty.kLocal, CallErrorCode.NoUserMedia, false); } From e5c8e4f4fe5545fa8abbd462ad553f1488fd023e Mon Sep 17 00:00:00 2001 From: Krille Fear Date: Thu, 2 Dec 2021 15:12:59 +0000 Subject: [PATCH 14/15] chore: Time unit replacement. --- lib/src/voip.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 3d0d5b14..6966e0bc 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -880,7 +880,7 @@ class CallSession { pc!.onIceGatheringState = (RTCIceGatheringState state) async { Logs().v('[VOIP] IceGatheringState => ${state.toString()}'); if (state == RTCIceGatheringState.RTCIceGatheringStateGathering) { - Timer(Duration(milliseconds: 3000), () async { + Timer(Duration(seconds: 3), () async { if (!iceGatheringFinished) { iceGatheringFinished = true; await _candidateReady(); From 61da76a26a1787d9dbb3f4f800da439dfb231206 Mon Sep 17 00:00:00 2001 From: cloudwebrtc Date: Thu, 2 Dec 2021 23:18:01 +0800 Subject: [PATCH 15/15] chore: Implement the onAnsweredElsewhere function. --- lib/src/voip.dart | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/src/voip.dart b/lib/src/voip.dart index 3d0d5b14..42ffcd21 100644 --- a/lib/src/voip.dart +++ b/lib/src/voip.dart @@ -1258,6 +1258,9 @@ class VoIP { if (!call._answeredByUs) { delegate.stopRingtone(); } + if (call.state == CallState.kRinging) { + call.onAnsweredElsewhere('Call ID ' + callId + ' answered elsewhere'); + } return; }