fix: ability to upgrade audio calls to video calls
fix: setMicrophoneMuted is now async to match setVideoMuted
commit 7ce6595b3d
parent 40c553c44d
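For reference, a minimal call-site sketch of how these changes are meant to be used, assuming an already-established audio-only CallSession from this SDK; the helper name upgradeToVideo and the variable call are illustrative and not part of this commit:

// Hedged sketch only: `call` is assumed to be a connected, audio-only
// CallSession (the class touched by this diff); imports omitted.
Future<void> upgradeToVideo(CallSession call) async {
  // setMicrophoneMuted is now async, so call sites must await it,
  // matching setLocalVideoMuted.
  await call.setMicrophoneMuted(false);

  // Un-muting local video on an audio-only call takes the new upgrade path:
  // setLocalVideoMuted checks hasVideoToSend() and, if no video track is
  // being sent yet, calls insertVideoTrackToAudioOnlyStream() to add or
  // replace tracks on the peer connection.
  await call.setLocalVideoMuted(false);
}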
@@ -410,6 +410,20 @@ class CallSession {
     return null;
   }
 
+  /// returns whether a 1:1 call sender has video tracks
+  Future<bool> hasVideoToSend() async {
+    final transceivers = await pc!.getTransceivers();
+    final localUserMediaVideoTrack = localUserMediaStream?.stream
+        ?.getTracks()
+        .singleWhereOrNull((track) => track.kind == 'video');
+
+    // check if we have a video track locally and have transceivers setup correctly.
+    return localUserMediaVideoTrack != null &&
+        transceivers.singleWhereOrNull((transceiver) =>
+                transceiver.sender.track?.id == localUserMediaVideoTrack.id) !=
+            null;
+  }
+
   Timer? inviteTimer;
   Timer? ringingTimer;
 
@@ -622,8 +636,15 @@ class CallSession {
 
     try {
       await pc!.setRemoteDescription(description);
+      RTCSessionDescription? answer;
       if (description.type == 'offer') {
-        final answer = await pc!.createAnswer({});
+        try {
+          answer = await pc!.createAnswer({});
+        } catch (e) {
+          await terminate(CallParty.kLocal, CallErrorCode.CreateAnswer, true);
+          return;
+        }
+
         await sendCallNegotiate(
             room, callId, Timeouts.lifetimeMs, localPartyId, answer.sdp!,
             type: answer.type!);
@@ -747,7 +768,9 @@ class CallSession {
     if (stream == null) {
       return false;
     }
-    stream.getVideoTracks().forEach((track) {
+    stream.getTracks().forEach((track) {
+      // screen sharing should only have 1 video track anyway, so this only
+      // fires once
       track.onEnded = () {
         setScreensharingEnabled(false);
       };
@@ -761,16 +784,21 @@ class CallSession {
         return false;
       }
     } else {
-      for (final sender in screensharingSenders) {
-        await pc!.removeTrack(sender);
+      try {
+        for (final sender in screensharingSenders) {
+          await pc!.removeTrack(sender);
+        }
+        for (final track in localScreenSharingStream!.stream!.getTracks()) {
+          await track.stop();
+        }
+        localScreenSharingStream!.stopped = true;
+        await _removeStream(localScreenSharingStream!.stream!);
+        fireCallEvent(CallEvent.kFeedsChanged);
+        return false;
+      } catch (e) {
+        Logs().e('[VOIP] stopping screen sharing track failed', e);
+        return false;
       }
-      for (final track in localScreenSharingStream!.stream!.getTracks()) {
-        await track.stop();
-      }
-      localScreenSharingStream!.stopped = true;
-      await _removeStream(localScreenSharingStream!.stream!);
-      fireCallEvent(CallEvent.kFeedsChanged);
-      return false;
     }
   }
 
@@ -918,16 +946,85 @@ class CallSession {
     fireCallEvent(CallEvent.kState);
   }
 
-  void setLocalVideoMuted(bool muted) {
+  Future<void> setLocalVideoMuted(bool muted) async {
+    if (!muted) {
+      final videoToSend = await hasVideoToSend();
+      if (!videoToSend) {
+        if (remoteSDPStreamMetadata == null) return;
+        await insertVideoTrackToAudioOnlyStream();
+      }
+    }
     localUserMediaStream?.setVideoMuted(muted);
-    _updateMuteStatus();
+    await _updateMuteStatus();
+  }
+
+  // used for upgrading 1:1 calls
+  Future<void> insertVideoTrackToAudioOnlyStream() async {
+    if (localUserMediaStream != null && localUserMediaStream!.stream != null) {
+      final stream = await _getUserMedia(CallType.kVideo);
+      if (stream != null) {
+        Logs().e('[VOIP] running replaceTracks() on stream: ${stream.id}');
+        _setTracksEnabled(stream.getVideoTracks(), true);
+        // replace local tracks
+        for (final track in localUserMediaStream!.stream!.getTracks()) {
+          try {
+            await localUserMediaStream!.stream!.removeTrack(track);
+            await track.stop();
+          } catch (e) {
+            Logs().w('failed to stop track');
+          }
+        }
+        final streamTracks = stream.getTracks();
+        for (final newTrack in streamTracks) {
+          await localUserMediaStream!.stream!.addTrack(newTrack);
+        }
+
+        // remove any screen sharing or remote transceivers, these don't need
+        // to be replaced anyway.
+        final transceivers = await pc!.getTransceivers();
+        transceivers.removeWhere((transceiver) =>
+            transceiver.sender.track == null ||
+            (localScreenSharingStream != null &&
+                localScreenSharingStream!.stream != null &&
+                localScreenSharingStream!.stream!
+                    .getTracks()
+                    .map((e) => e.id)
+                    .contains(transceiver.sender.track?.id)));
+
+        // in an ideal case the following should happen
+        // - audio track gets replaced
+        // - new video track gets added
+        for (final newTrack in streamTracks) {
+          final transceiver = transceivers.singleWhereOrNull(
+              (transceiver) => transceiver.sender.track!.kind == newTrack.kind);
+          if (transceiver != null) {
+            Logs().d(
+                '[VOIP] replacing ${transceiver.sender.track} in transceiver');
+            final oldSender = transceiver.sender;
+            await oldSender.replaceTrack(newTrack);
+            await transceiver.setDirection(
+              await transceiver.getDirection() ==
+                      TransceiverDirection.Inactive // upgrade, send now
+                  ? TransceiverDirection.SendOnly
+                  : TransceiverDirection.SendRecv,
+            );
+          } else {
+            // adding transceiver
+            Logs().d('[VOIP] adding track $newTrack to pc');
+            await pc!.addTrack(newTrack, localUserMediaStream!.stream!);
+          }
+        }
+        // for renderer to be able to show new video track
+        localUserMediaStream?.renderer.srcObject = stream;
+      }
+    }
   }
 
   bool get isLocalVideoMuted => localUserMediaStream?.isVideoMuted() ?? false;
 
-  void setMicrophoneMuted(bool muted) {
+  Future<void> setMicrophoneMuted(bool muted) async {
     localUserMediaStream?.setAudioMuted(muted);
-    _updateMuteStatus();
+    await _updateMuteStatus();
   }
 
   bool get isMicrophoneMuted => localUserMediaStream?.isAudioMuted() ?? false;
 
@@ -1375,9 +1472,12 @@ class CallSession {
     if (event.streams.isNotEmpty) {
       final stream = event.streams[0];
       _addRemoteStream(stream);
-      stream.getVideoTracks().forEach((track) {
+      stream.getTracks().forEach((track) {
         track.onEnded = () {
-          _removeStream(stream);
+          if (stream.getTracks().isEmpty) {
+            Logs().d('[VOIP] detected a empty stream, removing it');
+            _removeStream(stream);
+          }
         };
       });
     }
@@ -582,8 +582,9 @@ class GroupCall {
       final stream = await _getDisplayMedia();
       stream.getTracks().forEach((track) {
         track.onEnded = () {
+          // screen sharing should only have 1 video track anyway, so this only
+          // fires once
           setScreensharingEnabled(false, '');
-          track.onEnded = null;
         };
       });
       Logs().v(
@@ -1159,15 +1160,12 @@ class GroupCall {
   void onActiveSpeakerLoop() async {
     String? nextActiveSpeaker;
     // idc about screen sharing atm.
-    for (final callFeed in userMediaStreams) {
-      if (callFeed.userId == client.userID && callFeed.pc == null) {
-        activeSpeakerLoopTimeout?.cancel();
-        activeSpeakerLoopTimeout =
-            Timer(activeSpeakerInterval, onActiveSpeakerLoop);
+    for (final stream in userMediaStreams) {
+      if (stream.userId == client.userID && stream.pc == null) {
         continue;
       }
 
-      final List<StatsReport> statsReport = await callFeed.pc!.getStats();
+      final List<StatsReport> statsReport = await stream.pc!.getStats();
       statsReport
           .removeWhere((element) => !element.values.containsKey('audioLevel'));
 
@@ -1178,7 +1176,7 @@ class GroupCall {
               element.values['kind'] == 'audio')
           ?.values['audioLevel'];
       if (otherPartyAudioLevel != null) {
-        audioLevelsMap[callFeed.userId] = otherPartyAudioLevel;
+        audioLevelsMap[stream.userId] = otherPartyAudioLevel;
       }
 
       // https://www.w3.org/TR/webrtc-stats/#dom-rtcstatstype-media-source