feat: active speaker in group calls

parent 0eb8dc96c1
commit 00154f3c78
@@ -58,6 +58,7 @@ class WrappedMediaStream {
   VideoRenderer renderer;
   final bool isWeb;
   final bool isGroupCall;
+  final RTCPeerConnection? pc;

   /// for debug
   String get title => '$displayName:$purpose:a[$audioMuted]:v[$videoMuted]';
@@ -70,6 +71,7 @@ class WrappedMediaStream {

   WrappedMediaStream(
       {this.stream,
+      this.pc,
       required this.renderer,
       required this.room,
       required this.userId,
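The new optional pc field is the hook the active-speaker feature hangs on: each wrapped stream now carries a handle on the RTCPeerConnection it belongs to, so the group-call loop can read per-stream WebRTC stats. A minimal sketch of that idea, assuming a stream constructed with a non-null pc as in the hunks below (the helper name audioLevelOf is hypothetical, not part of this commit):

    // Hypothetical helper: read the current audio level for one wrapped
    // stream from its peer connection's stats, or null if there is no pc.
    Future<double?> audioLevelOf(WrappedMediaStream wrapped) async {
      final pc = wrapped.pc;
      if (pc == null) return null; // purely local stream, nothing to poll
      final reports = await pc.getStats();
      for (final report in reports) {
        final level = report.values['audioLevel'];
        if (level is num) return level.toDouble();
      }
      return null;
    }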
@@ -778,16 +780,18 @@ class CallSession {
       existingStream.first.setNewStream(stream);
     } else {
       final newStream = WrappedMediaStream(
           renderer: voip.delegate.createRenderer(),
           userId: client.userID!,
           room: opts.room,
           stream: stream,
           purpose: purpose,
           client: client,
           audioMuted: stream.getAudioTracks().isEmpty,
           videoMuted: stream.getVideoTracks().isEmpty,
           isWeb: voip.delegate.isWeb,
-          isGroupCall: groupCallId != null);
+          isGroupCall: groupCallId != null,
+          pc: pc,
+      );
       await newStream.initialize();
       streams.add(newStream);
       onStreamAdd.add(newStream);
@@ -839,16 +843,18 @@ class CallSession {
       existingStream.first.setNewStream(stream);
     } else {
       final newStream = WrappedMediaStream(
           renderer: voip.delegate.createRenderer(),
           userId: remoteUser!.id,
           room: opts.room,
           stream: stream,
           purpose: purpose,
           client: client,
           audioMuted: audioMuted,
           videoMuted: videoMuted,
           isWeb: voip.delegate.isWeb,
-          isGroupCall: groupCallId != null);
+          isGroupCall: groupCallId != null,
+          pc: pc,
+      );
       await newStream.initialize();
       streams.add(newStream);
       onStreamAdd.add(newStream);
@@ -176,10 +176,8 @@ class GroupCall {

   static const updateExpireTsTimerDuration = Duration(seconds: 15);
   static const expireTsBumpDuration = Duration(seconds: 45);
+  static const activeSpeakerInterval = Duration(seconds: 5);
-  var activeSpeakerInterval = 1000;
-  var retryCallInterval = 5000;
-  var participantTimeout = 1000 * 15;
   final Client client;
   final VoIP voip;
   final Room room;
@@ -189,7 +187,7 @@ class GroupCall {
   final RTCDataChannelInit? dataChannelOptions;
   String state = GroupCallState.LocalCallFeedUninitialized;
   StreamSubscription<CallSession>? _callSubscription;
+  final Map<String, double> audioLevelsMap = {};
   String? activeSpeaker; // userId
   WrappedMediaStream? localUserMediaStream;
   WrappedMediaStream? localScreenshareStream;
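Taken together, the two GroupCall hunks above introduce the state the feature runs on: activeSpeakerInterval is now a Duration constant that drives a polling timer, and audioLevelsMap caches the latest audio level per user id. A hedged sketch of how those two pieces are meant to interact, using the same names as the diff (the helpers scheduleNextActiveSpeakerPoll and loudestUserIn are hypothetical; assumes dart:async is imported for Timer):

    // Sketch only: re-arm the poll every activeSpeakerInterval ...
    Timer? activeSpeakerLoopTimeout;

    void scheduleNextActiveSpeakerPoll(void Function() poll) {
      activeSpeakerLoopTimeout?.cancel();
      activeSpeakerLoopTimeout = Timer(GroupCall.activeSpeakerInterval, poll);
    }

    // ... and pick the loudest user out of audioLevelsMap.
    String? loudestUserIn(Map<String, double> audioLevelsMap) {
      String? loudest;
      var maxLevel = double.negativeInfinity;
      audioLevelsMap.forEach((userId, level) {
        if (level > maxLevel) {
          loudest = userId;
          maxLevel = level;
        }
      });
      return loudest;
    }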
@@ -373,18 +371,18 @@ class GroupCall {
     }

     final userId = client.userID;

     final newStream = WrappedMediaStream(
         renderer: voip.delegate.createRenderer(),
         stream: stream,
         userId: userId!,
         room: room,
         client: client,
         purpose: SDPStreamMetadataPurpose.Usermedia,
         audioMuted: stream.getAudioTracks().isEmpty,
         videoMuted: stream.getVideoTracks().isEmpty,
         isWeb: voip.delegate.isWeb,
-        isGroupCall: true);
+        isGroupCall: true,
+    );

     localUserMediaStream = newStream;
     await localUserMediaStream!.initialize();
@@ -597,16 +595,17 @@ class GroupCall {
         'Screensharing permissions granted. Setting screensharing enabled on all calls');
     localDesktopCapturerSourceId = desktopCapturerSourceId;
     localScreenshareStream = WrappedMediaStream(
         renderer: voip.delegate.createRenderer(),
         stream: stream,
         userId: client.userID!,
         room: room,
         client: client,
         purpose: SDPStreamMetadataPurpose.Screenshare,
         audioMuted: stream.getAudioTracks().isEmpty,
         videoMuted: stream.getVideoTracks().isEmpty,
         isWeb: voip.delegate.isWeb,
-        isGroupCall: true);
+        isGroupCall: true,
+    );

     addScreenshareStream(localScreenshareStream!);
     await localScreenshareStream!.initialize();
@@ -1123,7 +1122,7 @@ class GroupCall {
     }

     userMediaStreams.removeWhere((element) => element.userId == stream.userId);
+    audioLevelsMap.remove(stream.userId);
     onStreamRemoved.add(stream);

     if (stream.isLocal()) {
@@ -1139,41 +1138,50 @@ class GroupCall {
     }
   }

-  void onActiveSpeakerLoop() {
-    /* TODO(duan):
-    var topAvg = 0.0;
+  void onActiveSpeakerLoop() async {
     String? nextActiveSpeaker;
-    userMediaFeeds.forEach((callFeed) {
-      if (callFeed.userId == client.userID && userMediaFeeds.length > 1) {
-        return;
+    // idc about screen sharing atm.
+    for (final callFeed in userMediaStreams) {
+      if (callFeed.userId == client.userID && callFeed.pc == null) {
+        activeSpeakerLoopTimeout?.cancel();
+        activeSpeakerLoopTimeout =
+            Timer(activeSpeakerInterval, onActiveSpeakerLoop);
+        continue;
       }

-      var total = 0;
-
-      for (var i = 0; i < callFeed.speakingVolumeSamples.length; i++) {
-        final volume = callFeed.speakingVolumeSamples[i];
-        total += max(volume, SPEAKING_THRESHOLD);
-      }
-
-      final avg = total / callFeed.speakingVolumeSamples.length;
-
-      if (topAvg != 0 || avg > topAvg) {
-        topAvg = avg;
-        nextActiveSpeaker = callFeed.userId;
-      }
+      final List<StatsReport> statsReport = await callFeed.pc!.getStats();
+      statsReport
+          .removeWhere((element) => !element.values.containsKey('audioLevel'));
+
+      // https://www.w3.org/TR/webrtc-stats/#dom-rtcstatstype-media-source
+      // firefox does not seem to have this though. Works on chrome and android
+      audioLevelsMap[client.userID!] = statsReport
+          .lastWhere((element) =>
+              element.type == 'media-source' &&
+              element.values['kind'] == 'audio')
+          .values['audioLevel'];
+      // works everywhere?
+      audioLevelsMap[callFeed.userId] = statsReport
+          .lastWhere((element) => element.type == 'inbound-rtp')
+          .values['audioLevel'];
+    }
+
+    double maxAudioLevel = double.negativeInfinity;
+    // TODO: we probably want a threshold here?
+    audioLevelsMap.forEach((key, value) {
+      if (value > maxAudioLevel) {
+        nextActiveSpeaker = key;
+        maxAudioLevel = value;
+      }
     });

-    if (nextActiveSpeaker != null &&
-        activeSpeaker != nextActiveSpeaker &&
-        topAvg > SPEAKING_THRESHOLD) {
+    if (nextActiveSpeaker != null && activeSpeaker != nextActiveSpeaker) {
       activeSpeaker = nextActiveSpeaker;
       onGroupCallEvent.add(GroupCallEvent.ActiveSpeakerChanged);
     }
+    activeSpeakerLoopTimeout?.cancel();
     activeSpeakerLoopTimeout =
-        Timer(Duration(seconds: activeSpeakerInterval), onActiveSpeakerLoop);
-    */
+        Timer(activeSpeakerInterval, onActiveSpeakerLoop);
   }

   WrappedMediaStream? getScreenshareStreamByUserId(String userId) {
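For consumers of the SDK, the surface stays the same: the loop only updates activeSpeaker and emits GroupCallEvent.ActiveSpeakerChanged on onGroupCallEvent, so a UI reacts to it like any other group-call event. A minimal, hedged usage sketch (it assumes onGroupCallEvent exposes a listenable stream, which the .add() calls above suggest but this page does not show):

    // Sketch: react whenever the active speaker changes.
    void watchActiveSpeaker(GroupCall groupCall) {
      groupCall.onGroupCallEvent.stream.listen((event) {
        if (event == GroupCallEvent.ActiveSpeakerChanged) {
          // May be null until the first poll has filled audioLevelsMap.
          final userId = groupCall.activeSpeaker;
          print('active speaker is now $userId');
        }
      });
    }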
@@ -856,8 +856,7 @@ class VoIP {
     final Map<String, int> participants = {};
     final callMemberEvents = room.states.tryGetMap<String, Event>(
         EventTypes.GroupCallMemberPrefix);
-    Logs().e(
-        'callmemeberEvents length ${callMemberEvents?.length}');
     if (callMemberEvents != null) {
       callMemberEvents.forEach((userId, memberEvent) async {
         final callMemberEvent = groupCallEvent.room.getState(

@@ -872,7 +871,6 @@ class VoIP {
       });
     }

-    Logs().e(participants.toString());
     if (!participants.values.any((expire_ts) =>
         expire_ts > DateTime.now().millisecondsSinceEpoch)) {
       Logs().i(