Line data Source code
1 : /*
2 : * Copyright (C) 2004-2026 Savoir-faire Linux Inc.
3 : *
4 : * This program is free software: you can redistribute it and/or modify
5 : * it under the terms of the GNU General Public License as published by
6 : * the Free Software Foundation, either version 3 of the License, or
7 : * (at your option) any later version.
8 : *
9 : * This program is distributed in the hope that it will be useful,
10 : * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 : * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 : * GNU General Public License for more details.
13 : *
14 : * You should have received a copy of the GNU General Public License
15 : * along with this program. If not, see <https://www.gnu.org/licenses/>.
16 : */
17 :
18 : #include <regex>
19 : #include <sstream>
20 :
21 : #include "conference.h"
22 : #include "manager.h"
23 : #include "audio/audiolayer.h"
24 : #include "jamidht/jamiaccount.h"
25 : #include "string_utils.h"
26 : #include "sip/siptransport.h"
27 :
28 : #include "client/videomanager.h"
29 : #include "tracepoint.h"
30 : #ifdef ENABLE_VIDEO
31 : #include "call.h"
32 : #include "video/video_input.h"
33 : #include "video/video_mixer.h"
34 : #endif
35 :
36 : #ifdef ENABLE_PLUGIN
37 : #include "plugin/jamipluginmanager.h"
38 : #endif
39 :
40 : #include "call_factory.h"
41 :
42 : #include "logger.h"
43 : #include "jami/media_const.h"
44 : #include "audio/ringbufferpool.h"
45 : #include "sip/sipcall.h"
46 : #include "json_utils.h"
47 :
48 : #include <opendht/thread_pool.h>
49 :
50 : using namespace std::literals;
51 :
52 : namespace jami {
53 :
54 37 : Conference::Conference(const std::shared_ptr<Account>& account, const std::string& confId)
55 37 : : id_(confId.empty() ? Manager::instance().callFactory.getNewCallID() : confId)
56 37 : , account_(account)
57 : #ifdef ENABLE_VIDEO
58 74 : , videoEnabled_(account->isVideoEnabled())
59 : #endif
60 : {
61 148 : JAMI_LOG("[conf:{}] Creating conference", id_);
62 37 : duration_start_ = clock::now();
63 :
64 : #ifdef ENABLE_VIDEO
65 37 : setupVideoMixer();
66 : #endif
67 37 : registerProtocolHandlers();
68 :
69 : jami_tracepoint(conference_begin, id_.c_str());
70 37 : }
71 :
72 : #ifdef ENABLE_VIDEO
73 : void
74 37 : Conference::setupVideoMixer()
75 : {
76 37 : videoMixer_ = std::make_shared<video::VideoMixer>(id_);
77 37 : videoMixer_->setOnSourcesUpdated([this](std::vector<video::SourceInfo>&& infos) {
78 69 : runOnMainThread([w = weak(), infos = std::move(infos)]() mutable {
79 68 : if (auto shared = w.lock())
80 68 : shared->onVideoSourcesUpdated(std::move(infos));
81 68 : });
82 68 : });
83 :
84 37 : auto conf_res = split_string_to_unsigned(jami::Manager::instance().videoPreferences.getConferenceResolution(), 'x');
85 37 : if (conf_res.size() == 2u) {
86 : #if defined(__APPLE__) && TARGET_OS_MAC
87 : videoMixer_->setParameters(conf_res[0], conf_res[1], AV_PIX_FMT_NV12);
88 : #else
89 37 : videoMixer_->setParameters(conf_res[0], conf_res[1]);
90 : #endif
91 : } else {
92 0 : JAMI_ERROR("[conf:{}] Conference resolution is invalid", id_);
93 : }
94 37 : }
95 :
96 : void
97 68 : Conference::onVideoSourcesUpdated(const std::vector<video::SourceInfo>& infos)
98 : {
99 68 : auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock());
100 68 : if (!acc)
101 0 : return;
102 :
103 68 : ConfInfo newInfo;
104 : {
105 68 : std::lock_guard lock(confInfoMutex_);
106 68 : newInfo.w = confInfo_.w;
107 68 : newInfo.h = confInfo_.h;
108 68 : newInfo.layout = confInfo_.layout;
109 68 : }
110 :
111 68 : bool hostAdded = false;
112 251 : for (const auto& info : infos) {
113 183 : if (!info.callId.empty()) {
114 115 : newInfo.emplace_back(createParticipantInfoFromRemoteSource(info));
115 : } else {
116 68 : newInfo.emplace_back(createParticipantInfoFromLocalSource(info, acc, hostAdded));
117 : }
118 : }
119 :
120 68 : if (auto videoMixer = videoMixer_) {
121 68 : newInfo.h = videoMixer->getHeight();
122 68 : newInfo.w = videoMixer->getWidth();
123 68 : }
124 :
125 68 : if (!hostAdded) {
126 1 : ParticipantInfo pi;
127 1 : pi.videoMuted = true;
128 1 : pi.audioLocalMuted = isMediaSourceMuted(MediaType::MEDIA_AUDIO);
129 1 : pi.isModerator = true;
130 1 : newInfo.emplace_back(pi);
131 1 : }
132 :
133 68 : updateConferenceInfo(std::move(newInfo));
134 68 : }
135 :
136 : ParticipantInfo
137 115 : Conference::createParticipantInfoFromRemoteSource(const video::SourceInfo& info)
138 : {
139 115 : ParticipantInfo participant;
140 115 : participant.x = info.x;
141 115 : participant.y = info.y;
142 115 : participant.w = info.w;
143 115 : participant.h = info.h;
144 115 : participant.videoMuted = !info.hasVideo;
145 :
146 115 : std::string callId = info.callId;
147 230 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId))) {
148 115 : participant.uri = call->getPeerNumber();
149 115 : participant.audioLocalMuted = call->isPeerMuted();
150 115 : participant.recording = call->isPeerRecording();
151 115 : if (auto* transport = call->getTransport())
152 115 : participant.device = transport->deviceId();
153 115 : }
154 :
155 115 : std::string_view peerId = string_remove_suffix(participant.uri, '@');
156 115 : participant.isModerator = isModerator(peerId);
157 115 : participant.handRaised = isHandRaised(participant.device);
158 115 : participant.audioModeratorMuted = isMuted(callId);
159 115 : participant.voiceActivity = isVoiceActive(info.streamId);
160 115 : participant.sinkId = info.streamId;
161 :
162 115 : if (auto videoMixer = videoMixer_)
163 115 : participant.active = videoMixer->verifyActive(info.streamId);
164 :
165 230 : return participant;
166 115 : }
167 :
168 : ParticipantInfo
169 68 : Conference::createParticipantInfoFromLocalSource(const video::SourceInfo& info,
170 : const std::shared_ptr<JamiAccount>& acc,
171 : bool& hostAdded)
172 : {
173 68 : ParticipantInfo participant;
174 68 : participant.x = info.x;
175 68 : participant.y = info.y;
176 68 : participant.w = info.w;
177 68 : participant.h = info.h;
178 68 : participant.videoMuted = !info.hasVideo;
179 :
180 68 : auto streamInfo = videoMixer_->streamInfo(info.source);
181 68 : std::string streamId = streamInfo.streamId;
182 :
183 68 : if (!streamId.empty()) {
184 : // Retrieve calls participants
185 : // TODO: this is a first version, we assume that the peer is not
186 : // a master of a conference and there is only one remote
187 : // In the future, we should retrieve confInfo from the call
188 : // To merge layout information
189 57 : participant.audioModeratorMuted = isMuted(streamId);
190 57 : if (auto videoMixer = videoMixer_)
191 57 : participant.active = videoMixer->verifyActive(streamId);
192 114 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(streamInfo.callId))) {
193 0 : participant.uri = call->getPeerNumber();
194 0 : participant.audioLocalMuted = call->isPeerMuted();
195 0 : participant.recording = call->isPeerRecording();
196 0 : if (auto* transport = call->getTransport())
197 0 : participant.device = transport->deviceId();
198 57 : }
199 : } else {
200 11 : streamId = sip_utils::streamId("", sip_utils::DEFAULT_VIDEO_STREAMID);
201 11 : if (auto videoMixer = videoMixer_)
202 11 : participant.active = videoMixer->verifyActive(streamId);
203 : }
204 :
205 68 : std::string_view peerId = string_remove_suffix(participant.uri, '@');
206 68 : participant.isModerator = isModerator(peerId);
207 :
208 : // Check if this is the local host
209 68 : if (participant.uri.empty() && !hostAdded) {
210 67 : hostAdded = true;
211 67 : participant.device = acc->currentDeviceId();
212 67 : participant.audioLocalMuted = isMediaSourceMuted(MediaType::MEDIA_AUDIO);
213 67 : participant.recording = isRecording();
214 : }
215 :
216 68 : participant.handRaised = isHandRaised(participant.device);
217 68 : participant.voiceActivity = isVoiceActive(streamId);
218 68 : participant.sinkId = std::move(streamId);
219 :
220 136 : return participant;
221 68 : }
222 : #endif
223 :
224 : void
225 37 : Conference::registerProtocolHandlers()
226 : {
227 37 : parser_.onVersion([&](uint32_t) {}); // TODO
228 43 : parser_.onCheckAuthorization([&](std::string_view peerId) { return isModerator(peerId); });
229 37 : parser_.onHangupParticipant(
230 0 : [&](const auto& accountUri, const auto& deviceId) { hangupParticipant(accountUri, deviceId); });
231 42 : parser_.onRaiseHand([&](const auto& deviceId, bool state) { setHandRaised(deviceId, state); });
232 37 : parser_.onSetActiveStream([&](const auto& streamId, bool state) { setActiveStream(streamId, state); });
233 37 : parser_.onMuteStreamAudio([&](const auto& accountUri, const auto& deviceId, const auto& streamId, bool state) {
234 0 : muteStream(accountUri, deviceId, streamId, state);
235 0 : });
236 37 : parser_.onSetLayout([&](int layout) { setLayout(layout); });
237 :
238 : // Version 0, deprecated
239 37 : parser_.onKickParticipant([&](const auto& participantId) { hangupParticipant(participantId); });
240 37 : parser_.onSetActiveParticipant([&](const auto& participantId) { setActiveParticipant(participantId); });
241 37 : parser_.onMuteParticipant([&](const auto& participantId, bool state) { muteParticipant(participantId, state); });
242 37 : parser_.onRaiseHandUri([&](const auto& uri, bool state) {
243 0 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCallFromPeerID(uri)))
244 0 : if (auto* transport = call->getTransport())
245 0 : setHandRaised(std::string(transport->deviceId()), state);
246 0 : });
247 :
248 37 : parser_.onVoiceActivity([&](const auto& streamId, bool state) { setVoiceActivity(streamId, state); });
249 37 : }
250 :
251 37 : Conference::~Conference()
252 : {
253 148 : JAMI_LOG("[conf:{}] Destroying conference", id_);
254 :
255 : #ifdef ENABLE_VIDEO
256 37 : auto videoManager = Manager::instance().getVideoManager();
257 37 : auto defaultDevice = videoManager ? videoManager->videoDeviceMonitor.getMRLForDefaultDevice() : std::string {};
258 41 : foreachCall([&](auto call) {
259 4 : call->exitConference();
260 : // Reset distant callInfo
261 4 : call->resetConfInfo();
262 : // Trigger the SIP negotiation to update the resolution for the remaining call
263 : // ideally this sould be done without renegotiation
264 4 : call->switchInput(defaultDevice);
265 :
266 : // Continue the recording for the call if the conference was recorded
267 4 : if (isRecording()) {
268 0 : JAMI_DEBUG("[conf:{}] Stopping recording", getConfId());
269 0 : toggleRecording();
270 0 : if (not call->isRecording()) {
271 0 : JAMI_DEBUG("[call:{}] Starting recording (conference was recorded)", call->getCallId());
272 0 : call->toggleRecording();
273 : }
274 : }
275 : // Notify that the remaining peer is still recording after conference
276 4 : if (call->isPeerRecording())
277 0 : call->peerRecording(true);
278 4 : });
279 37 : if (videoMixer_) {
280 37 : auto& sink = videoMixer_->getSink();
281 37 : for (auto it = confSinksMap_.begin(); it != confSinksMap_.end();) {
282 0 : sink->detach(it->second.get());
283 0 : it->second->stop();
284 0 : it = confSinksMap_.erase(it);
285 : }
286 : }
287 : #endif // ENABLE_VIDEO
288 : #ifdef ENABLE_PLUGIN
289 : {
290 37 : std::lock_guard lk(avStreamsMtx_);
291 37 : jami::Manager::instance().getJamiPluginManager().getCallServicesManager().clearCallHandlerMaps(getConfId());
292 37 : Manager::instance().getJamiPluginManager().getCallServicesManager().clearAVSubject(getConfId());
293 37 : confAVStreams.clear();
294 37 : }
295 : #endif // ENABLE_PLUGIN
296 37 : if (shutdownCb_)
297 14 : shutdownCb_(getDuration().count());
298 : // do not propagate sharing from conf host to calls
299 37 : closeMediaPlayer(mediaPlayerId_);
300 : jami_tracepoint(conference_end, id_.c_str());
301 37 : }
302 :
303 : Conference::State
304 930 : Conference::getState() const
305 : {
306 930 : return confState_;
307 : }
308 :
309 : void
310 83 : Conference::setState(State state)
311 : {
312 332 : JAMI_DEBUG("[conf:{}] State change: {} -> {}", id_, getStateStr(), getStateStr(state));
313 :
314 83 : confState_ = state;
315 83 : }
316 :
317 : void
318 26 : Conference::initSourcesForHost()
319 : {
320 26 : hostSources_.clear();
321 : // Setup local audio source
322 26 : MediaAttribute audioAttr;
323 26 : if (confState_ == State::ACTIVE_ATTACHED) {
324 0 : audioAttr = {MediaType::MEDIA_AUDIO, false, false, true, {}, sip_utils::DEFAULT_AUDIO_STREAMID};
325 : }
326 :
327 104 : JAMI_DEBUG("[conf:{}] Setting local host audio source: {}", id_, audioAttr.toString());
328 26 : hostSources_.emplace_back(audioAttr);
329 :
330 : #ifdef ENABLE_VIDEO
331 26 : if (isVideoEnabled()) {
332 26 : MediaAttribute videoAttr;
333 : // Setup local video source
334 26 : if (confState_ == State::ACTIVE_ATTACHED) {
335 : videoAttr = {MediaType::MEDIA_VIDEO,
336 : false,
337 : false,
338 : true,
339 0 : Manager::instance().getVideoManager()->videoDeviceMonitor.getMRLForDefaultDevice(),
340 0 : sip_utils::DEFAULT_VIDEO_STREAMID};
341 : }
342 104 : JAMI_DEBUG("[conf:{}] Setting local host video source: {}", id_, videoAttr.toString());
343 26 : hostSources_.emplace_back(videoAttr);
344 26 : }
345 : #endif
346 :
347 26 : reportMediaNegotiationStatus();
348 26 : }
349 :
350 : void
351 63 : Conference::reportMediaNegotiationStatus()
352 : {
353 63 : emitSignal<libjami::CallSignal::MediaNegotiationStatus>(
354 126 : getConfId(), libjami::Media::MediaNegotiationStatusEvents::NEGOTIATION_SUCCESS, currentMediaList());
355 63 : }
356 :
357 : std::vector<std::map<std::string, std::string>>
358 106 : Conference::currentMediaList() const
359 : {
360 106 : return MediaAttribute::mediaAttributesToMediaMaps(hostSources_);
361 : }
362 :
363 : #ifdef ENABLE_PLUGIN
364 : void
365 68 : Conference::createConfAVStreams()
366 : {
367 68 : std::string accountId = getAccountId();
368 :
369 0 : auto audioMap = [](const std::shared_ptr<jami::MediaFrame>& m) -> AVFrame* {
370 0 : return std::static_pointer_cast<AudioFrame>(m)->pointer();
371 : };
372 :
373 : // Preview and Received
374 68 : if ((audioMixer_ = jami::getAudioInput(getConfId()))) {
375 68 : auto audioSubject = std::make_shared<MediaStreamSubject>(audioMap);
376 68 : StreamData previewStreamData {getConfId(), false, StreamType::audio, getConfId(), accountId};
377 68 : createConfAVStream(previewStreamData, *audioMixer_, audioSubject);
378 68 : StreamData receivedStreamData {getConfId(), true, StreamType::audio, getConfId(), accountId};
379 68 : createConfAVStream(receivedStreamData, *audioMixer_, audioSubject);
380 68 : }
381 :
382 : #ifdef ENABLE_VIDEO
383 :
384 68 : if (videoMixer_) {
385 : // Review
386 68 : auto receiveSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
387 68 : StreamData receiveStreamData {getConfId(), true, StreamType::video, getConfId(), accountId};
388 68 : createConfAVStream(receiveStreamData, *videoMixer_, receiveSubject);
389 :
390 : // Preview
391 68 : if (auto videoPreview = videoMixer_->getVideoLocal()) {
392 51 : auto previewSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
393 51 : StreamData previewStreamData {getConfId(), false, StreamType::video, getConfId(), accountId};
394 51 : createConfAVStream(previewStreamData, *videoPreview, previewSubject);
395 119 : }
396 68 : }
397 : #endif // ENABLE_VIDEO
398 68 : }
399 :
400 : void
401 255 : Conference::createConfAVStream(const StreamData& StreamData,
402 : AVMediaStream& streamSource,
403 : const std::shared_ptr<MediaStreamSubject>& mediaStreamSubject,
404 : bool force)
405 : {
406 255 : std::lock_guard lk(avStreamsMtx_);
407 510 : const std::string AVStreamId = StreamData.id + std::to_string(static_cast<int>(StreamData.type))
408 510 : + std::to_string(StreamData.direction);
409 255 : auto it = confAVStreams.find(AVStreamId);
410 255 : if (!force && it != confAVStreams.end())
411 144 : return;
412 :
413 111 : confAVStreams.erase(AVStreamId);
414 111 : confAVStreams[AVStreamId] = mediaStreamSubject;
415 111 : streamSource.attachPriorityObserver(mediaStreamSubject);
416 111 : jami::Manager::instance().getJamiPluginManager().getCallServicesManager().createAVSubject(StreamData,
417 : mediaStreamSubject);
418 399 : }
419 : #endif // ENABLE_PLUGIN
420 :
421 : void
422 119 : Conference::setLocalHostMuteState(MediaType type, bool muted)
423 : {
424 343 : for (auto& source : hostSources_)
425 224 : if (source.type_ == type) {
426 118 : source.muted_ = muted;
427 : }
428 119 : }
429 :
430 : bool
431 385 : Conference::isMediaSourceMuted(MediaType type) const
432 : {
433 385 : if (getState() != State::ACTIVE_ATTACHED) {
434 : // Assume muted if not attached.
435 7 : return true;
436 : }
437 :
438 378 : if (type != MediaType::MEDIA_AUDIO and type != MediaType::MEDIA_VIDEO) {
439 0 : JAMI_ERROR("Unsupported media type");
440 0 : return true;
441 : }
442 :
443 : // Check only the primary (first) source of the given type.
444 : // Secondary sources (e.g. additional audio streams) being muted
445 : // should not affect the overall mute state of the host.
446 476 : for (const auto& source : hostSources_) {
447 463 : if (source.type_ == type) {
448 365 : if (source.type_ == MediaType::MEDIA_NONE) {
449 0 : JAMI_WARNING("The host source for {} is not set. The mute state is meaningless",
450 : source.mediaTypeToString(source.type_));
451 365 : return true;
452 : }
453 365 : return source.muted_;
454 : }
455 : }
456 : // No source of this type found so assume muted.
457 13 : return true;
458 : }
459 :
460 : void
461 68 : Conference::takeOverMediaSourceControl(const std::string& callId)
462 : {
463 68 : auto call = getCall(callId);
464 68 : if (not call) {
465 0 : JAMI_ERROR("[conf:{}] No call matches participant {}", id_, callId);
466 0 : return;
467 : }
468 :
469 68 : auto account = call->getAccount().lock();
470 68 : if (not account) {
471 0 : JAMI_ERROR("[conf:{}] No account detected for call {}", id_, callId);
472 0 : return;
473 : }
474 :
475 68 : auto mediaList = call->getMediaAttributeList();
476 :
477 68 : std::vector<MediaType> mediaTypeList {MediaType::MEDIA_AUDIO, MediaType::MEDIA_VIDEO};
478 :
479 204 : for (auto mediaType : mediaTypeList) {
480 : // Try to find a media with a valid source type
481 192 : auto check = [mediaType](auto const& mediaAttr) {
482 192 : return (mediaAttr.type_ == mediaType);
483 136 : };
484 :
485 136 : auto iter = std::find_if(mediaList.begin(), mediaList.end(), check);
486 :
487 136 : if (iter == mediaList.end()) {
488 : // Nothing to do if the call does not have a stream with
489 : // the requested media.
490 48 : JAMI_DEBUG("[conf:{}] Call {} does not have an active {} media source",
491 : id_,
492 : callId,
493 : MediaAttribute::mediaTypeToString(mediaType));
494 12 : continue;
495 12 : }
496 :
497 124 : if (getState() == State::ACTIVE_ATTACHED) {
498 : // To mute the local source, all the sources of the participating
499 : // calls must be muted. If it's the first participant, just use
500 : // its mute state.
501 118 : if (subCalls_.size() == 1) {
502 49 : setLocalHostMuteState(iter->type_, iter->muted_);
503 : } else {
504 69 : setLocalHostMuteState(iter->type_, iter->muted_ or isMediaSourceMuted(iter->type_));
505 : }
506 : }
507 : }
508 :
509 : // Update the media states in the newly added call.
510 68 : call->requestMediaChange(MediaAttribute::mediaAttributesToMediaMaps(mediaList));
511 :
512 : // Notify the client
513 204 : for (auto mediaType : mediaTypeList) {
514 136 : if (mediaType == MediaType::MEDIA_AUDIO) {
515 68 : bool muted = isMediaSourceMuted(MediaType::MEDIA_AUDIO);
516 272 : JAMI_DEBUG("[conf:{}] Taking over audio control from call {} - current state: {}",
517 : id_,
518 : callId,
519 : muted ? "muted" : "unmuted");
520 68 : emitSignal<libjami::CallSignal::AudioMuted>(id_, muted);
521 : } else {
522 68 : bool muted = isMediaSourceMuted(MediaType::MEDIA_VIDEO);
523 272 : JAMI_DEBUG("[conf:{}] Taking over video control from call {} - current state: {}",
524 : id_,
525 : callId,
526 : muted ? "muted" : "unmuted");
527 68 : emitSignal<libjami::CallSignal::VideoMuted>(id_, muted);
528 : }
529 : }
530 68 : }
531 :
532 : bool
533 37 : Conference::requestMediaChange(const std::vector<libjami::MediaMap>& mediaList)
534 : {
535 37 : if (getState() != State::ACTIVE_ATTACHED) {
536 0 : JAMI_ERROR("[conf {}] Request media change can be performed only in attached mode", getConfId());
537 0 : return false;
538 : }
539 :
540 148 : JAMI_DEBUG("[conf:{}] Processing media change request", getConfId());
541 :
542 37 : auto mediaAttrList = MediaAttribute::buildMediaAttributesList(mediaList, false);
543 :
544 : #ifdef ENABLE_VIDEO
545 : // Check if the host previously had video
546 37 : bool hostHadVideo = MediaAttribute::hasMediaType(hostSources_, MediaType::MEDIA_VIDEO)
547 37 : && !isMediaSourceMuted(MediaType::MEDIA_VIDEO);
548 : // Check if the host will have video after this change
549 37 : bool hostWillHaveVideo = false;
550 75 : for (const auto& media : mediaAttrList) {
551 67 : if (media.type_ == MediaType::MEDIA_VIDEO && media.enabled_ && !media.muted_) {
552 29 : hostWillHaveVideo = true;
553 29 : break;
554 : }
555 : }
556 : #endif
557 :
558 37 : bool hasFileSharing {false};
559 105 : for (const auto& media : mediaAttrList) {
560 68 : if (!media.enabled_ || media.sourceUri_.empty())
561 68 : continue;
562 :
563 : // Supported MRL schemes
564 3 : static const std::string sep = libjami::Media::VideoProtocolPrefix::SEPARATOR;
565 :
566 3 : const auto pos = media.sourceUri_.find(sep);
567 3 : if (pos == std::string::npos)
568 3 : continue;
569 :
570 0 : const auto prefix = media.sourceUri_.substr(0, pos);
571 0 : if ((pos + sep.size()) >= media.sourceUri_.size())
572 0 : continue;
573 :
574 0 : if (prefix == libjami::Media::VideoProtocolPrefix::FILE) {
575 0 : hasFileSharing = true;
576 0 : mediaPlayerId_ = media.sourceUri_;
577 0 : createMediaPlayer(mediaPlayerId_);
578 : }
579 0 : }
580 :
581 37 : if (!hasFileSharing) {
582 37 : closeMediaPlayer(mediaPlayerId_);
583 37 : mediaPlayerId_ = "";
584 : }
585 :
586 105 : for (auto const& mediaAttr : mediaAttrList) {
587 272 : JAMI_DEBUG("[conf:{}] Requested media: {}", getConfId(), mediaAttr.toString(true));
588 : }
589 :
590 37 : std::vector<std::string> newVideoInputs;
591 105 : for (auto const& mediaAttr : mediaAttrList) {
592 : // Find media
593 68 : auto oldIdx = std::find_if(hostSources_.begin(), hostSources_.end(), [&](auto oldAttr) {
594 9 : return oldAttr.label_ == mediaAttr.label_;
595 : });
596 : // If video, add to newVideoInputs
597 : #ifdef ENABLE_VIDEO
598 68 : if (mediaAttr.type_ == MediaType::MEDIA_VIDEO) {
599 31 : auto srcUri = mediaAttr.sourceUri_;
600 : // If no sourceUri, use the default video device
601 31 : if (srcUri.empty()) {
602 28 : if (auto vm = Manager::instance().getVideoManager())
603 28 : srcUri = vm->videoDeviceMonitor.getMRLForDefaultDevice();
604 : else
605 0 : continue;
606 : }
607 31 : if (!mediaAttr.muted_)
608 30 : newVideoInputs.emplace_back(std::move(srcUri));
609 31 : } else {
610 : #endif
611 37 : hostAudioInputs_[mediaAttr.label_] = jami::getAudioInput(mediaAttr.label_);
612 : #ifdef ENABLE_VIDEO
613 : }
614 : #endif
615 68 : if (oldIdx != hostSources_.end()) {
616 : // Check if muted status changes
617 5 : if (mediaAttr.muted_ != oldIdx->muted_) {
618 : // Secondary audio sources (e.g. screenshare audio) must be
619 : // handled per-stream. The global muteLocalHost() would
620 : // mute/unmute ALL audio sources (including the microphone),
621 : // so we skip it here and let bindHostAudio() apply the
622 : // per-source mute state after hostSources_ is updated.
623 1 : if (mediaAttr.type_ == MediaType::MEDIA_AUDIO && mediaAttr.label_ != sip_utils::DEFAULT_AUDIO_STREAMID) {
624 0 : JAMI_DEBUG("[conf:{}] Secondary audio mute handled per-stream", getConfId());
625 : } else {
626 2 : muteLocalHost(mediaAttr.muted_,
627 1 : mediaAttr.type_ == MediaType::MEDIA_AUDIO
628 : ? libjami::Media::Details::MEDIA_TYPE_AUDIO
629 : : libjami::Media::Details::MEDIA_TYPE_VIDEO);
630 : }
631 : }
632 : }
633 : }
634 :
635 : #ifdef ENABLE_VIDEO
636 37 : if (videoMixer_) {
637 37 : if (newVideoInputs.empty()) {
638 8 : videoMixer_->addAudioOnlySource("", sip_utils::streamId("", sip_utils::DEFAULT_AUDIO_STREAMID));
639 : } else {
640 29 : videoMixer_->switchInputs(newVideoInputs);
641 : }
642 : }
643 : #endif
644 37 : hostSources_ = mediaAttrList; // New medias
645 37 : if (!isMuted("host"sv) && !isMediaSourceMuted(MediaType::MEDIA_AUDIO))
646 34 : bindHostAudio();
647 :
648 : #ifdef ENABLE_VIDEO
649 : // If the host is adding video (didn't have video before, has video now),
650 : // we need to ensure all subcalls also have video negotiated so they can
651 : // receive the mixed video stream.
652 37 : if (!hostHadVideo && hostWillHaveVideo) {
653 108 : JAMI_DEBUG("[conf:{}] Host added video, negotiating video with all subcalls", getConfId());
654 27 : negotiateVideoWithSubcalls();
655 : }
656 : #endif
657 :
658 : // Inform the client about the media negotiation status.
659 37 : reportMediaNegotiationStatus();
660 37 : return true;
661 37 : }
662 :
663 : void
664 2 : Conference::handleMediaChangeRequest(const std::shared_ptr<Call>& call,
665 : const std::vector<libjami::MediaMap>& remoteMediaList)
666 : {
667 8 : JAMI_DEBUG("[conf:{}] Answering media change request from call {}", getConfId(), call->getCallId());
668 2 : auto currentMediaList = hostSources_;
669 :
670 : #ifdef ENABLE_VIDEO
671 : // Check if the participant previously had video
672 2 : auto previousMediaList = call->getMediaAttributeList();
673 2 : bool participantHadVideo = MediaAttribute::hasMediaType(previousMediaList, MediaType::MEDIA_VIDEO);
674 :
675 : // If the new media list has video, remove the participant from audioonlylist.
676 : auto participantWillHaveVideo
677 2 : = MediaAttribute::hasMediaType(MediaAttribute::buildMediaAttributesList(remoteMediaList, false),
678 2 : MediaType::MEDIA_VIDEO);
679 8 : JAMI_DEBUG(
680 : "[conf:{}] [call:{}] remoteHasVideo={}, removing from audio-only sources BEFORE media negotiation completes",
681 : getConfId(),
682 : call->getCallId(),
683 : participantWillHaveVideo);
684 2 : if (videoMixer_ && participantWillHaveVideo) {
685 2 : auto callId = call->getCallId();
686 2 : auto audioStreamId = sip_utils::streamId(callId, sip_utils::DEFAULT_AUDIO_STREAMID);
687 8 : JAMI_WARNING("[conf:{}] [call:{}] Removing audio-only source '{}' - participant may briefly disappear from "
688 : "layout until video is attached",
689 : getConfId(),
690 : callId,
691 : audioStreamId);
692 2 : videoMixer_->removeAudioOnlySource(callId, audioStreamId);
693 2 : }
694 : #endif
695 :
696 2 : auto remoteList = remoteMediaList;
697 7 : for (auto it = remoteList.begin(); it != remoteList.end();) {
698 15 : if (it->at(libjami::Media::MediaAttributeKey::MUTED) == TRUE_STR
699 15 : or it->at(libjami::Media::MediaAttributeKey::ENABLED) == FALSE_STR) {
700 0 : it = remoteList.erase(it);
701 : } else {
702 5 : ++it;
703 : }
704 : }
705 : // Create minimum media list (ignore muted and disabled medias)
706 2 : std::vector<libjami::MediaMap> newMediaList;
707 2 : newMediaList.reserve(remoteMediaList.size());
708 6 : for (auto const& media : currentMediaList) {
709 4 : if (media.enabled_ and not media.muted_)
710 4 : newMediaList.emplace_back(MediaAttribute::toMediaMap(media));
711 : }
712 3 : for (auto idx = newMediaList.size(); idx < remoteMediaList.size(); idx++)
713 1 : newMediaList.emplace_back(remoteMediaList[idx]);
714 :
715 : // NOTE:
716 : // Since this is a conference, newly added media will be also
717 : // accepted.
718 : // This also means that if original call was an audio-only call,
719 : // the local camera will be enabled, unless the video is disabled
720 : // in the account settings.
721 2 : call->answerMediaChangeRequest(newMediaList);
722 2 : call->enterConference(shared_from_this());
723 :
724 : // Rebind audio after media renegotiation so that any newly added
725 : // audio streams are wired into the conference mixing mesh.
726 2 : unbindSubCallAudio(call->getCallId());
727 2 : bindSubCallAudio(call->getCallId());
728 :
729 : #ifdef ENABLE_VIDEO
730 : // If a participant is adding video (didn't have it before, has it now),
731 : // we need to make sure all other subcalls also have video negotiated so they
732 : // can receive the mixed video stream that now includes the new participant's video
733 2 : if (!participantHadVideo && participantWillHaveVideo) {
734 0 : JAMI_DEBUG("[conf:{}] [call:{}] Participant added video, negotiating video with other subcalls",
735 : getConfId(),
736 : call->getCallId());
737 0 : negotiateVideoWithSubcalls(call->getCallId());
738 : }
739 : #endif
740 2 : }
741 :
742 : void
743 68 : Conference::addSubCall(const std::string& callId)
744 : {
745 272 : JAMI_DEBUG("[conf:{}] Adding call {}", id_, callId);
746 :
747 : jami_tracepoint(conference_add_participant, id_.c_str(), callId.c_str());
748 :
749 : {
750 68 : std::lock_guard lk(subcallsMtx_);
751 68 : if (!subCalls_.insert(callId).second)
752 0 : return;
753 68 : }
754 :
755 136 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId))) {
756 : // Check if participant was muted before conference
757 68 : if (call->isPeerMuted())
758 0 : participantsMuted_.emplace(call->getCallId());
759 :
760 : // NOTE:
761 : // When a call joins a conference, the media source of the call
762 : // will be set to the output of the conference mixer.
763 68 : takeOverMediaSourceControl(callId);
764 68 : auto w = call->getAccount();
765 68 : auto account = w.lock();
766 68 : if (account) {
767 : // Add defined moderators for the account link to the call
768 68 : for (const auto& mod : account->getDefaultModerators()) {
769 0 : moderators_.emplace(mod);
770 68 : }
771 :
772 : // Check for localModeratorsEnabled preference
773 68 : if (account->isLocalModeratorsEnabled() && not localModAdded_) {
774 30 : auto accounts = jami::Manager::instance().getAllAccounts<JamiAccount>();
775 143 : for (const auto& account : accounts) {
776 113 : moderators_.emplace(account->getUsername());
777 : }
778 30 : localModAdded_ = true;
779 30 : }
780 :
781 : // Check for allModeratorEnabled preference
782 68 : if (account->isAllModerators())
783 68 : moderators_.emplace(getRemoteId(call));
784 : }
785 : #ifdef ENABLE_VIDEO
786 : // In conference, if a participant joins with an audio only
787 : // call, it must be listed in the audioonlylist.
788 68 : auto mediaList = call->getMediaAttributeList();
789 68 : if (call->peerUri().find("swarm:") != 0) { // We're hosting so it's already ourself.
790 68 : if (videoMixer_ && not MediaAttribute::hasMediaType(mediaList, MediaType::MEDIA_VIDEO)) {
791 : // Normally not called, as video stream is added for audio-only answers.
792 : // The audio-only source will be added in VideoRtpSession startReceiver,
793 : // after ICE negotiation, when peers can properly create video sinks.
794 24 : videoMixer_->addAudioOnlySource(call->getCallId(),
795 24 : sip_utils::streamId(call->getCallId(),
796 : sip_utils::DEFAULT_AUDIO_STREAMID));
797 : }
798 : }
799 68 : call->enterConference(shared_from_this());
800 : // Continue the recording for the conference if one participant was recording
801 68 : if (call->isRecording()) {
802 0 : JAMI_DEBUG("[call:{}] Stopping recording", call->getCallId());
803 0 : call->toggleRecording();
804 0 : if (not this->isRecording()) {
805 0 : JAMI_DEBUG("[conf:{}] Starting recording (participant was recording)", getConfId());
806 0 : this->toggleRecording();
807 : }
808 : }
809 68 : bindSubCallAudio(callId);
810 : #endif // ENABLE_VIDEO
811 68 : } else
812 68 : JAMI_ERROR("[conf:{}] No call associated with participant {}", id_, callId);
813 : #ifdef ENABLE_PLUGIN
814 68 : createConfAVStreams();
815 : #endif
816 : }
817 :
818 : void
819 64 : Conference::removeSubCall(const std::string& callId)
820 : {
821 256 : JAMI_DEBUG("[conf:{}] Removing call {}", id_, callId);
822 : {
823 64 : std::lock_guard lk(subcallsMtx_);
824 64 : if (!subCalls_.erase(callId))
825 0 : return;
826 64 : }
827 :
828 64 : clearParticipantData(callId);
829 :
830 128 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId))) {
831 : #ifdef ENABLE_VIDEO
832 64 : if (videoMixer_) {
833 181 : for (auto const& rtpSession : call->getRtpSessionList()) {
834 117 : if (rtpSession->getMediaType() == MediaType::MEDIA_AUDIO)
835 64 : videoMixer_->removeAudioOnlySource(callId, rtpSession->streamId());
836 117 : if (videoMixer_->verifyActive(rtpSession->streamId()))
837 1 : videoMixer_->resetActiveStream();
838 64 : }
839 : }
840 : #endif // ENABLE_VIDEO
841 64 : unbindSubCallAudio(callId);
842 64 : call->exitConference();
843 64 : if (call->isPeerRecording())
844 0 : call->peerRecording(false);
845 64 : }
846 : }
847 :
848 : #ifdef ENABLE_VIDEO
849 : void
850 27 : Conference::negotiateVideoWithSubcalls(const std::string& excludeCallId)
851 : {
852 27 : if (!isVideoEnabled()) {
853 0 : JAMI_DEBUG("[conf:{}] Video is disabled in account, skipping subcall video negotiation", id_);
854 0 : return;
855 : }
856 :
857 108 : JAMI_DEBUG("[conf:{}] Negotiating video with subcalls (excluding: {})",
858 : id_,
859 : excludeCallId.empty() ? "none" : excludeCallId);
860 :
861 27 : for (const auto& callId : getSubCalls()) {
862 0 : if (callId == excludeCallId) {
863 0 : continue;
864 : }
865 :
866 0 : auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId));
867 0 : if (!call) {
868 0 : continue;
869 : }
870 :
871 0 : auto mediaList = call->getMediaAttributeList();
872 0 : auto videoIt = std::find_if(mediaList.begin(), mediaList.end(), [](const auto& media) {
873 0 : return media.type_ == MediaType::MEDIA_VIDEO;
874 : });
875 :
876 0 : if (videoIt == mediaList.end()) {
877 0 : JAMI_DEBUG("[conf:{}] [call:{}] Call does not have video, triggering renegotiation to add video",
878 : id_,
879 : callId);
880 :
881 0 : MediaAttribute videoAttr;
882 0 : videoAttr.type_ = MediaType::MEDIA_VIDEO;
883 0 : videoAttr.enabled_ = true;
884 0 : videoAttr.muted_ = false;
885 0 : videoAttr.label_ = sip_utils::DEFAULT_VIDEO_STREAMID;
886 : // Source not needed because the mixer becomes the data source for the video stream
887 0 : videoAttr.sourceUri_.clear();
888 :
889 0 : mediaList.emplace_back(videoAttr);
890 :
891 0 : call->requestMediaChange(MediaAttribute::mediaAttributesToMediaMaps(mediaList));
892 0 : call->enterConference(shared_from_this());
893 0 : } else {
894 0 : bool needsUpdate = false;
895 0 : if (!videoIt->enabled_) {
896 0 : videoIt->enabled_ = true;
897 0 : needsUpdate = true;
898 : }
899 0 : if (videoIt->muted_) {
900 0 : videoIt->muted_ = false;
901 0 : needsUpdate = true;
902 : }
903 0 : if (!videoIt->sourceUri_.empty()) {
904 : // Source not needed because the mixer becomes the data source for the video stream
905 0 : videoIt->sourceUri_.clear();
906 0 : needsUpdate = true;
907 : }
908 :
909 0 : if (needsUpdate) {
910 0 : JAMI_DEBUG("[conf:{}] [call:{}] Unmuting existing video stream for renegotiation", id_, callId);
911 0 : call->requestMediaChange(MediaAttribute::mediaAttributesToMediaMaps(mediaList));
912 0 : call->enterConference(shared_from_this());
913 : }
914 : }
915 27 : }
916 : }
917 : #endif
918 :
919 : void
920 0 : Conference::setActiveParticipant(const std::string& participant_id)
921 : {
922 : #ifdef ENABLE_VIDEO
923 0 : if (!videoMixer_)
924 0 : return;
925 0 : if (isHost(participant_id)) {
926 0 : videoMixer_->setActiveStream(sip_utils::streamId("", sip_utils::DEFAULT_VIDEO_STREAMID));
927 0 : return;
928 : }
929 0 : if (auto call = getCallFromPeerID(participant_id)) {
930 0 : videoMixer_->setActiveStream(sip_utils::streamId(call->getCallId(), sip_utils::DEFAULT_VIDEO_STREAMID));
931 0 : return;
932 0 : }
933 :
934 0 : auto remoteHost = findHostforRemoteParticipant(participant_id);
935 0 : if (not remoteHost.empty()) {
936 : // This logic will be handled client side
937 0 : return;
938 : }
939 : // Unset active participant by default
940 0 : videoMixer_->resetActiveStream();
941 : #endif
942 : }
943 :
944 : void
945 1 : Conference::setActiveStream(const std::string& streamId, bool state)
946 : {
947 : #ifdef ENABLE_VIDEO
948 1 : if (!videoMixer_)
949 0 : return;
950 1 : if (state)
951 1 : videoMixer_->setActiveStream(streamId);
952 : else
953 0 : videoMixer_->resetActiveStream();
954 : #endif
955 : }
956 :
957 : void
958 0 : Conference::setLayout(int layout)
959 : {
960 : #ifdef ENABLE_VIDEO
961 0 : if (layout < 0 || layout > 2) {
962 0 : JAMI_ERROR("[conf:{}] Unknown layout {}", id_, layout);
963 0 : return;
964 : }
965 0 : if (!videoMixer_)
966 0 : return;
967 : {
968 0 : std::lock_guard lk(confInfoMutex_);
969 0 : confInfo_.layout = layout;
970 0 : }
971 0 : videoMixer_->setVideoLayout(static_cast<video::Layout>(layout));
972 : #endif
973 : }
974 :
975 : std::vector<std::map<std::string, std::string>>
976 438 : ConfInfo::toVectorMapStringString() const
977 : {
978 438 : std::vector<std::map<std::string, std::string>> infos;
979 438 : infos.reserve(size());
980 1502 : for (const auto& info : *this)
981 1064 : infos.emplace_back(info.toMap());
982 438 : return infos;
983 0 : }
984 :
985 : std::string
986 225 : ConfInfo::toString() const
987 : {
988 225 : Json::Value val = {};
989 855 : for (const auto& info : *this) {
990 633 : val["p"].append(info.toJson());
991 : }
992 223 : val["w"] = w;
993 225 : val["h"] = h;
994 225 : val["v"] = v;
995 225 : val["layout"] = layout;
996 449 : return json::toString(val);
997 225 : }
998 :
999 : void
1000 150 : Conference::sendConferenceInfos()
1001 : {
1002 : // Inform calls that the layout has changed
1003 150 : foreachCall([&](auto call) {
1004 : // Produce specific JSON for each participant (2 separate accounts can host ...
1005 : // a conference on a same device, the conference is not link to one account).
1006 225 : auto w = call->getAccount();
1007 225 : auto account = w.lock();
1008 225 : if (!account)
1009 0 : return;
1010 :
1011 675 : dht::ThreadPool::io().run(
1012 450 : [call, confInfo = getConfInfoHostUri(account->getUsername() + "@ring.dht", call->getPeerNumber())] {
1013 225 : call->sendConfInfo(confInfo.toString());
1014 : });
1015 225 : });
1016 :
1017 150 : auto confInfo = getConfInfoHostUri("", "");
1018 : #ifdef ENABLE_VIDEO
1019 150 : createSinks(confInfo);
1020 : #endif
1021 :
1022 : // Inform client that layout has changed
1023 150 : jami::emitSignal<libjami::CallSignal::OnConferenceInfosUpdated>(id_, confInfo.toVectorMapStringString());
1024 150 : }
1025 :
1026 : #ifdef ENABLE_VIDEO
1027 : void
1028 150 : Conference::createSinks(const ConfInfo& infos)
1029 : {
1030 150 : std::lock_guard lk(sinksMtx_);
1031 150 : if (!videoMixer_)
1032 0 : return;
1033 150 : auto& sink = videoMixer_->getSink();
1034 450 : Manager::instance().createSinkClients(getConfId(),
1035 : infos,
1036 : {std::static_pointer_cast<video::VideoFrameActiveWriter>(sink)},
1037 150 : confSinksMap_,
1038 300 : getAccountId());
1039 150 : }
1040 : #endif
1041 :
1042 : void
1043 46 : Conference::attachHost(const std::vector<libjami::MediaMap>& mediaList)
1044 : {
1045 184 : JAMI_DEBUG("[conf:{}] Attaching host", id_);
1046 :
1047 46 : if (getState() == State::ACTIVE_DETACHED) {
1048 34 : setState(State::ACTIVE_ATTACHED);
1049 34 : if (mediaList.empty()) {
1050 0 : JAMI_DEBUG("[conf:{}] Empty media list, initializing default sources", id_);
1051 0 : initSourcesForHost();
1052 0 : bindHostAudio();
1053 : #ifdef ENABLE_VIDEO
1054 0 : if (videoMixer_) {
1055 0 : std::vector<std::string> videoInputs;
1056 0 : for (const auto& source : hostSources_) {
1057 0 : if (source.type_ == MediaType::MEDIA_VIDEO)
1058 0 : videoInputs.emplace_back(source.sourceUri_);
1059 : }
1060 0 : if (videoInputs.empty()) {
1061 0 : videoMixer_->addAudioOnlySource("", sip_utils::streamId("", sip_utils::DEFAULT_AUDIO_STREAMID));
1062 : } else {
1063 0 : videoMixer_->switchInputs(videoInputs);
1064 : }
1065 0 : }
1066 : #endif
1067 : } else {
1068 34 : requestMediaChange(mediaList);
1069 : }
1070 : } else {
1071 48 : JAMI_WARNING("[conf:{}] Invalid conference state in attach participant: current \"{}\" - expected \"{}\"",
1072 : id_,
1073 : getStateStr(),
1074 : "ACTIVE_DETACHED");
1075 : }
1076 46 : }
1077 :
1078 : void
1079 28 : Conference::detachHost()
1080 : {
1081 112 : JAMI_LOG("[conf:{}] Detaching host", id_);
1082 :
1083 28 : lastMediaList_ = currentMediaList();
1084 :
1085 28 : if (getState() == State::ACTIVE_ATTACHED) {
1086 26 : unbindHostAudio();
1087 :
1088 : #ifdef ENABLE_VIDEO
1089 26 : if (videoMixer_)
1090 26 : videoMixer_->stopInputs();
1091 : #endif
1092 : } else {
1093 8 : JAMI_WARNING("[conf:{}] Invalid conference state in detach participant: current \"{}\" - expected \"{}\"",
1094 : id_,
1095 : getStateStr(),
1096 : "ACTIVE_ATTACHED");
1097 2 : return;
1098 : }
1099 :
1100 26 : setState(State::ACTIVE_DETACHED);
1101 26 : initSourcesForHost();
1102 : }
1103 :
1104 : CallIdSet
1105 451 : Conference::getSubCalls() const
1106 : {
1107 451 : std::lock_guard lk(subcallsMtx_);
1108 902 : return subCalls_;
1109 451 : }
1110 :
1111 : bool
1112 2 : Conference::toggleRecording()
1113 : {
1114 2 : bool newState = not isRecording();
1115 2 : if (newState)
1116 1 : initRecorder(recorder_);
1117 1 : else if (recorder_)
1118 1 : deinitRecorder(recorder_);
1119 :
1120 : // Notify each participant
1121 6 : foreachCall([&](auto call) { call->updateRecState(newState); });
1122 :
1123 2 : auto res = Recordable::toggleRecording();
1124 2 : updateRecording();
1125 2 : return res;
1126 : }
1127 :
1128 : std::string
1129 294 : Conference::getAccountId() const
1130 : {
1131 294 : if (auto account = getAccount())
1132 294 : return account->getAccountID();
1133 0 : return {};
1134 : }
1135 :
1136 : void
1137 0 : Conference::switchInput(const std::string& input)
1138 : {
1139 : #ifdef ENABLE_VIDEO
1140 0 : JAMI_DEBUG("[conf:{}] Switching video input to {}", id_, input);
1141 0 : std::vector<MediaAttribute> newSources;
1142 0 : auto firstVideo = true;
1143 : // Rewrite hostSources (remove all except one video input)
1144 : // This method is replaced by requestMediaChange
1145 0 : for (auto& source : hostSources_) {
1146 0 : if (source.type_ == MediaType::MEDIA_VIDEO) {
1147 0 : if (firstVideo) {
1148 0 : firstVideo = false;
1149 0 : source.sourceUri_ = input;
1150 0 : newSources.emplace_back(source);
1151 : }
1152 : } else {
1153 0 : newSources.emplace_back(source);
1154 : }
1155 : }
1156 :
1157 : // Done if the video is disabled
1158 0 : if (not isVideoEnabled())
1159 0 : return;
1160 :
1161 0 : if (auto mixer = videoMixer_) {
1162 0 : mixer->switchInputs({input});
1163 : #ifdef ENABLE_PLUGIN
1164 : // Preview
1165 0 : if (auto videoPreview = mixer->getVideoLocal()) {
1166 0 : auto previewSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
1167 0 : StreamData previewStreamData {getConfId(), false, StreamType::video, getConfId(), getAccountId()};
1168 0 : createConfAVStream(previewStreamData, *videoPreview, previewSubject, true);
1169 0 : }
1170 : #endif
1171 0 : }
1172 : #endif
1173 0 : }
1174 :
1175 : bool
1176 123 : Conference::isVideoEnabled() const
1177 : {
1178 123 : if (auto shared = account_.lock())
1179 123 : return shared->isVideoEnabled();
1180 0 : return false;
1181 : }
1182 :
1183 : #ifdef ENABLE_VIDEO
1184 : std::shared_ptr<video::VideoMixer>
1185 111 : Conference::getVideoMixer()
1186 : {
1187 111 : return videoMixer_;
1188 : }
1189 :
1190 : std::string
1191 0 : Conference::getVideoInput() const
1192 : {
1193 0 : for (const auto& source : hostSources_) {
1194 0 : if (source.type_ == MediaType::MEDIA_VIDEO)
1195 0 : return source.sourceUri_;
1196 : }
1197 0 : return {};
1198 : }
1199 : #endif
1200 :
1201 : void
1202 1 : Conference::initRecorder(std::shared_ptr<MediaRecorder>& rec)
1203 : {
1204 : #ifdef ENABLE_VIDEO
1205 : // Video
1206 1 : if (videoMixer_) {
1207 1 : if (auto ob = rec->addStream(videoMixer_->getStream("v:mixer"))) {
1208 1 : videoMixer_->attach(ob);
1209 : }
1210 : }
1211 : #endif
1212 :
1213 : // Audio
1214 : // Create ghost participant for ringbufferpool
1215 1 : auto& rbPool = Manager::instance().getRingBufferPool();
1216 1 : ghostRingBuffer_ = rbPool.createRingBuffer(getConfId());
1217 :
1218 : // Bind it to ringbufferpool in order to get the all mixed frames
1219 1 : bindSubCallAudio(getConfId());
1220 :
1221 : // Add stream to recorder
1222 1 : audioMixer_ = jami::getAudioInput(getConfId());
1223 1 : if (auto ob = rec->addStream(audioMixer_->getInfo("a:mixer"))) {
1224 1 : audioMixer_->attach(ob);
1225 : }
1226 1 : }
1227 :
1228 : void
1229 1 : Conference::deinitRecorder(std::shared_ptr<MediaRecorder>& rec)
1230 : {
1231 : #ifdef ENABLE_VIDEO
1232 : // Video
1233 1 : if (videoMixer_) {
1234 1 : if (auto ob = rec->getStream("v:mixer")) {
1235 1 : videoMixer_->detach(ob);
1236 : }
1237 : }
1238 : #endif
1239 :
1240 : // Audio
1241 1 : if (auto ob = rec->getStream("a:mixer"))
1242 1 : audioMixer_->detach(ob);
1243 1 : audioMixer_.reset();
1244 1 : Manager::instance().getRingBufferPool().unBindAll(getConfId());
1245 1 : ghostRingBuffer_.reset();
1246 1 : }
1247 :
1248 : void
1249 6 : Conference::onConfOrder(const std::string& callId, const std::string& confOrder)
1250 : {
1251 : // Check if the peer is a master
1252 6 : if (auto call = getCall(callId)) {
1253 6 : const auto& peerId = getRemoteId(call);
1254 6 : Json::Value root;
1255 6 : if (!json::parse(confOrder, root)) {
1256 0 : JAMI_WARNING("[conf:{}] Unable to parse conference order from {}", id_, peerId);
1257 0 : return;
1258 : }
1259 :
1260 6 : parser_.initData(std::move(root), peerId);
1261 6 : parser_.parse();
1262 12 : }
1263 : }
1264 :
1265 : std::shared_ptr<Call>
1266 927 : Conference::getCall(const std::string& callId)
1267 : {
1268 927 : return Manager::instance().callFactory.getCall(callId);
1269 : }
1270 :
1271 : bool
1272 196 : Conference::isModerator(std::string_view uri) const
1273 : {
1274 196 : return moderators_.find(uri) != moderators_.end() or isHost(uri);
1275 : }
1276 :
1277 : bool
1278 220 : Conference::isHandRaised(std::string_view deviceId) const
1279 : {
1280 366 : return isHostDevice(deviceId) ? handsRaised_.find("host"sv) != handsRaised_.end()
1281 366 : : handsRaised_.find(deviceId) != handsRaised_.end();
1282 : }
1283 :
1284 : void
1285 7 : Conference::setHandRaised(const std::string& deviceId, const bool& state)
1286 : {
1287 7 : if (isHostDevice(deviceId)) {
1288 0 : auto isPeerRequiringAttention = isHandRaised("host"sv);
1289 0 : if (state and not isPeerRequiringAttention) {
1290 0 : handsRaised_.emplace("host"sv);
1291 0 : updateHandsRaised();
1292 0 : } else if (not state and isPeerRequiringAttention) {
1293 0 : handsRaised_.erase("host");
1294 0 : updateHandsRaised();
1295 : }
1296 : } else {
1297 11 : for (const auto& p : getSubCalls()) {
1298 22 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(p))) {
1299 11 : auto isPeerRequiringAttention = isHandRaised(deviceId);
1300 11 : std::string callDeviceId;
1301 11 : if (auto* transport = call->getTransport())
1302 11 : callDeviceId = transport->deviceId();
1303 11 : if (deviceId == callDeviceId) {
1304 7 : if (state and not isPeerRequiringAttention) {
1305 4 : handsRaised_.emplace(deviceId);
1306 4 : updateHandsRaised();
1307 3 : } else if (not state and isPeerRequiringAttention) {
1308 3 : handsRaised_.erase(deviceId);
1309 3 : updateHandsRaised();
1310 : }
1311 7 : return;
1312 : }
1313 22 : }
1314 7 : }
1315 0 : JAMI_WARNING("[conf:{}] Failed to set hand raised for {} (participant not found)", id_, deviceId);
1316 : }
1317 : }
1318 :
1319 : bool
1320 183 : Conference::isVoiceActive(std::string_view streamId) const
1321 : {
1322 183 : return streamsVoiceActive.find(streamId) != streamsVoiceActive.end();
1323 : }
1324 :
1325 : void
1326 0 : Conference::setVoiceActivity(const std::string& streamId, const bool& newState)
1327 : {
1328 : // verify that streamID exists in our confInfo
1329 0 : bool exists = false;
1330 0 : for (auto& participant : confInfo_) {
1331 0 : if (participant.sinkId == streamId) {
1332 0 : exists = true;
1333 0 : break;
1334 : }
1335 : }
1336 :
1337 0 : if (!exists) {
1338 0 : JAMI_ERROR("[conf:{}] Participant not found with streamId: {}", id_, streamId);
1339 0 : return;
1340 : }
1341 :
1342 0 : auto previousState = isVoiceActive(streamId);
1343 :
1344 0 : if (previousState == newState) {
1345 : // no change, do not send out updates
1346 0 : return;
1347 : }
1348 :
1349 0 : if (newState and not previousState) {
1350 : // voice going from inactive to active
1351 0 : streamsVoiceActive.emplace(streamId);
1352 0 : updateVoiceActivity();
1353 0 : return;
1354 : }
1355 :
1356 0 : if (not newState and previousState) {
1357 : // voice going from active to inactive
1358 0 : streamsVoiceActive.erase(streamId);
1359 0 : updateVoiceActivity();
1360 0 : return;
1361 : }
1362 : }
1363 :
1364 : void
1365 1 : Conference::setModerator(const std::string& participant_id, const bool& state)
1366 : {
1367 3 : for (const auto& p : getSubCalls()) {
1368 3 : if (auto call = getCall(p)) {
1369 3 : auto isPeerModerator = isModerator(participant_id);
1370 3 : if (participant_id == getRemoteId(call)) {
1371 1 : if (state and not isPeerModerator) {
1372 0 : moderators_.emplace(participant_id);
1373 0 : updateModerators();
1374 1 : } else if (not state and isPeerModerator) {
1375 1 : moderators_.erase(participant_id);
1376 1 : updateModerators();
1377 : }
1378 1 : return;
1379 : }
1380 3 : }
1381 1 : }
1382 0 : JAMI_WARNING("[conf:{}] Failed to set moderator {} (participant not found)", id_, participant_id);
1383 : }
1384 :
1385 : void
1386 1 : Conference::updateModerators()
1387 : {
1388 1 : std::lock_guard lk(confInfoMutex_);
1389 5 : for (auto& info : confInfo_) {
1390 4 : info.isModerator = isModerator(string_remove_suffix(info.uri, '@'));
1391 : }
1392 1 : sendConferenceInfos();
1393 1 : }
1394 :
1395 : void
1396 7 : Conference::updateHandsRaised()
1397 : {
1398 7 : std::lock_guard lk(confInfoMutex_);
1399 33 : for (auto& info : confInfo_)
1400 26 : info.handRaised = isHandRaised(info.device);
1401 7 : sendConferenceInfos();
1402 7 : }
1403 :
1404 : void
1405 0 : Conference::updateVoiceActivity()
1406 : {
1407 0 : std::lock_guard lk(confInfoMutex_);
1408 :
1409 : // streamId is actually sinkId
1410 0 : for (ParticipantInfo& participantInfo : confInfo_) {
1411 : bool newActivity;
1412 :
1413 0 : if (auto call = getCallWith(std::string(string_remove_suffix(participantInfo.uri, '@')),
1414 0 : participantInfo.device)) {
1415 : // if this participant is in a direct call with us
1416 : // grab voice activity info directly from the call
1417 0 : newActivity = call->hasPeerVoice();
1418 : } else {
1419 : // check for it
1420 0 : newActivity = isVoiceActive(participantInfo.sinkId);
1421 0 : }
1422 :
1423 0 : if (participantInfo.voiceActivity != newActivity) {
1424 0 : participantInfo.voiceActivity = newActivity;
1425 : }
1426 : }
1427 0 : sendConferenceInfos(); // also emits signal to client
1428 0 : }
1429 :
1430 : void
1431 189 : Conference::foreachCall(const std::function<void(const std::shared_ptr<Call>& call)>& cb)
1432 : {
1433 422 : for (const auto& p : getSubCalls())
1434 233 : if (auto call = getCall(p))
1435 422 : cb(call);
1436 189 : }
1437 :
1438 : bool
1439 418 : Conference::isMuted(std::string_view callId) const
1440 : {
1441 418 : return participantsMuted_.find(callId) != participantsMuted_.end();
1442 : }
1443 :
1444 : void
1445 3 : Conference::muteStream(const std::string& accountUri, const std::string& deviceId, const std::string&, const bool& state)
1446 : {
1447 6 : if (auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock())) {
1448 3 : if (accountUri == acc->getUsername() && deviceId == acc->currentDeviceId()) {
1449 0 : muteHost(state);
1450 3 : } else if (auto call = getCallWith(accountUri, deviceId)) {
1451 3 : muteCall(call->getCallId(), state);
1452 : } else {
1453 0 : JAMI_WARNING("[conf:{}] No call with {} - {}", id_, accountUri, deviceId);
1454 3 : }
1455 3 : }
1456 3 : }
1457 :
1458 : void
1459 0 : Conference::muteHost(bool state)
1460 : {
1461 0 : auto isHostMuted = isMuted("host"sv);
1462 0 : if (state and not isHostMuted) {
1463 0 : participantsMuted_.emplace("host"sv);
1464 0 : if (not isMediaSourceMuted(MediaType::MEDIA_AUDIO)) {
1465 0 : unbindHostAudio();
1466 : }
1467 0 : } else if (not state and isHostMuted) {
1468 0 : participantsMuted_.erase("host");
1469 0 : if (not isMediaSourceMuted(MediaType::MEDIA_AUDIO)) {
1470 0 : bindHostAudio();
1471 : }
1472 : }
1473 0 : updateMuted();
1474 0 : }
1475 :
1476 : void
1477 3 : Conference::muteCall(const std::string& callId, bool state)
1478 : {
1479 3 : auto isPartMuted = isMuted(callId);
1480 3 : if (state and not isPartMuted) {
1481 2 : participantsMuted_.emplace(callId);
1482 2 : unbindSubCallAudio(callId);
1483 2 : updateMuted();
1484 1 : } else if (not state and isPartMuted) {
1485 1 : participantsMuted_.erase(callId);
1486 1 : bindSubCallAudio(callId);
1487 1 : updateMuted();
1488 : }
1489 3 : }
1490 :
1491 : void
1492 0 : Conference::muteParticipant(const std::string& participant_id, const bool& state)
1493 : {
1494 : // Prioritize remote mute, otherwise the mute info is lost during
1495 : // the conference merge (we don't send back info to remoteHost,
1496 : // cf. getConfInfoHostUri method)
1497 :
1498 : // Transfer remote participant mute
1499 0 : auto remoteHost = findHostforRemoteParticipant(participant_id);
1500 0 : if (not remoteHost.empty()) {
1501 0 : if (auto call = getCallFromPeerID(string_remove_suffix(remoteHost, '@'))) {
1502 0 : auto w = call->getAccount();
1503 0 : auto account = w.lock();
1504 0 : if (!account)
1505 0 : return;
1506 0 : Json::Value root;
1507 0 : root["muteParticipant"] = participant_id;
1508 0 : root["muteState"] = state ? TRUE_STR : FALSE_STR;
1509 0 : call->sendConfOrder(root);
1510 0 : return;
1511 0 : }
1512 : }
1513 :
1514 : // NOTE: For now we have no way to mute only one stream
1515 0 : if (isHost(participant_id))
1516 0 : muteHost(state);
1517 0 : else if (auto call = getCallFromPeerID(participant_id))
1518 0 : muteCall(call->getCallId(), state);
1519 : }
1520 :
1521 : void
1522 6 : Conference::updateRecording()
1523 : {
1524 6 : std::lock_guard lk(confInfoMutex_);
1525 24 : for (auto& info : confInfo_) {
1526 18 : if (info.uri.empty()) {
1527 6 : info.recording = isRecording();
1528 24 : } else if (auto call = getCallWith(std::string(string_remove_suffix(info.uri, '@')), info.device)) {
1529 12 : info.recording = call->isPeerRecording();
1530 12 : }
1531 : }
1532 6 : sendConferenceInfos();
1533 6 : }
1534 :
1535 : void
1536 4 : Conference::updateMuted()
1537 : {
1538 4 : std::lock_guard lk(confInfoMutex_);
1539 15 : for (auto& info : confInfo_) {
1540 11 : if (info.uri.empty()) {
1541 4 : info.audioModeratorMuted = isMuted("host"sv);
1542 4 : info.audioLocalMuted = isMediaSourceMuted(MediaType::MEDIA_AUDIO);
1543 14 : } else if (auto call = getCallWith(std::string(string_remove_suffix(info.uri, '@')), info.device)) {
1544 7 : info.audioModeratorMuted = isMuted(call->getCallId());
1545 7 : info.audioLocalMuted = call->isPeerMuted();
1546 7 : }
1547 : }
1548 4 : sendConferenceInfos();
1549 4 : }
1550 :
1551 : ConfInfo
1552 375 : Conference::getConfInfoHostUri(std::string_view localHostURI, std::string_view destURI)
1553 : {
1554 375 : ConfInfo newInfo = confInfo_;
1555 :
1556 1379 : for (auto it = newInfo.begin(); it != newInfo.end();) {
1557 1004 : bool isRemoteHost = remoteHosts_.find(it->uri) != remoteHosts_.end();
1558 1004 : if (it->uri.empty() and not destURI.empty()) {
1559 : // fill the empty uri with the local host URI, let void for local client
1560 217 : it->uri = localHostURI;
1561 : // If we're detached, remove the host
1562 217 : if (getState() == State::ACTIVE_DETACHED) {
1563 25 : it = newInfo.erase(it);
1564 25 : continue;
1565 : }
1566 : }
1567 979 : if (isRemoteHost) {
1568 : // Don't send back the ParticipantInfo for remote Host
1569 : // For other than remote Host, the new info is in remoteHosts_
1570 0 : it = newInfo.erase(it);
1571 : } else {
1572 979 : ++it;
1573 : }
1574 : }
1575 : // Add remote Host info
1576 375 : for (const auto& [hostUri, confInfo] : remoteHosts_) {
1577 : // Add remote info for remote host destination
1578 : // Example: ConfA, ConfB & ConfC
1579 : // ConfA send ConfA and ConfB for ConfC
1580 : // ConfA send ConfA and ConfC for ConfB
1581 : // ...
1582 0 : if (destURI != hostUri)
1583 0 : newInfo.insert(newInfo.end(), confInfo.begin(), confInfo.end());
1584 : }
1585 375 : return newInfo;
1586 0 : }
1587 :
1588 : bool
1589 71 : Conference::isHost(std::string_view uri) const
1590 : {
1591 71 : if (uri.empty())
1592 69 : return true;
1593 :
1594 : // Check if the URI is a local URI (AccountID) for at least one of the subcall
1595 : // (a local URI can be in the call with another device)
1596 8 : for (const auto& p : getSubCalls()) {
1597 6 : if (auto call = getCall(p)) {
1598 12 : if (auto account = call->getAccount().lock()) {
1599 6 : if (account->getUsername() == uri)
1600 0 : return true;
1601 6 : }
1602 6 : }
1603 2 : }
1604 2 : return false;
1605 : }
1606 :
1607 : bool
1608 227 : Conference::isHostDevice(std::string_view deviceId) const
1609 : {
1610 454 : if (auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock()))
1611 227 : return deviceId == acc->currentDeviceId();
1612 0 : return false;
1613 : }
1614 :
1615 : void
1616 68 : Conference::updateConferenceInfo(ConfInfo confInfo)
1617 : {
1618 68 : std::lock_guard lk(confInfoMutex_);
1619 68 : confInfo_ = std::move(confInfo);
1620 68 : sendConferenceInfos();
1621 68 : }
1622 :
1623 : void
1624 1 : Conference::hangupParticipant(const std::string& accountUri, const std::string& deviceId)
1625 : {
1626 2 : if (auto acc = std::dynamic_pointer_cast<JamiAccount>(account_.lock())) {
1627 1 : if (deviceId.empty()) {
1628 : // If deviceId is empty, hangup all calls with device
1629 0 : while (auto call = getCallFromPeerID(accountUri)) {
1630 0 : Manager::instance().hangupCall(acc->getAccountID(), call->getCallId());
1631 0 : }
1632 1 : return;
1633 : } else {
1634 1 : if (accountUri == acc->getUsername() && deviceId == acc->currentDeviceId()) {
1635 0 : Manager::instance().detachHost(shared_from_this());
1636 0 : return;
1637 1 : } else if (auto call = getCallWith(accountUri, deviceId)) {
1638 1 : Manager::instance().hangupCall(acc->getAccountID(), call->getCallId());
1639 1 : return;
1640 1 : }
1641 : }
1642 : // Else, it may be a remote host
1643 0 : auto remoteHost = findHostforRemoteParticipant(accountUri, deviceId);
1644 0 : if (remoteHost.empty()) {
1645 0 : JAMI_WARNING("[conf:{}] Unable to hangup {} (peer not found)", id_, accountUri);
1646 0 : return;
1647 : }
1648 0 : if (auto call = getCallFromPeerID(string_remove_suffix(remoteHost, '@'))) {
1649 : // Forward to the remote host.
1650 0 : libjami::hangupParticipant(acc->getAccountID(), call->getCallId(), accountUri, deviceId);
1651 0 : }
1652 1 : }
1653 : }
1654 :
1655 : void
1656 1 : Conference::muteLocalHost(bool is_muted, const std::string& mediaType)
1657 : {
1658 1 : if (mediaType.compare(libjami::Media::Details::MEDIA_TYPE_AUDIO) == 0) {
1659 1 : if (is_muted == isMediaSourceMuted(MediaType::MEDIA_AUDIO)) {
1660 0 : JAMI_DEBUG("[conf:{}] Local audio source already {}", id_, is_muted ? "muted" : "unmuted");
1661 0 : return;
1662 : }
1663 :
1664 1 : auto isHostMuted = isMuted("host"sv);
1665 1 : if (is_muted and not isMediaSourceMuted(MediaType::MEDIA_AUDIO) and not isHostMuted) {
1666 1 : unbindHostAudio();
1667 0 : } else if (not is_muted and isMediaSourceMuted(MediaType::MEDIA_AUDIO) and not isHostMuted) {
1668 0 : bindHostAudio();
1669 : }
1670 1 : setLocalHostMuteState(MediaType::MEDIA_AUDIO, is_muted);
1671 1 : updateMuted();
1672 1 : emitSignal<libjami::CallSignal::AudioMuted>(id_, is_muted);
1673 1 : return;
1674 0 : } else if (mediaType.compare(libjami::Media::Details::MEDIA_TYPE_VIDEO) == 0) {
1675 : #ifdef ENABLE_VIDEO
1676 0 : if (not isVideoEnabled()) {
1677 0 : JAMI_ERROR("Unable to stop camera, the camera is disabled!");
1678 0 : return;
1679 : }
1680 :
1681 0 : if (is_muted == isMediaSourceMuted(MediaType::MEDIA_VIDEO)) {
1682 0 : JAMI_DEBUG("[conf:{}] Local camera source already {}", id_, is_muted ? "stopped" : "started");
1683 0 : return;
1684 : }
1685 0 : setLocalHostMuteState(MediaType::MEDIA_VIDEO, is_muted);
1686 0 : if (is_muted) {
1687 0 : if (auto mixer = videoMixer_) {
1688 0 : mixer->stopInputs();
1689 0 : }
1690 : } else {
1691 0 : if (auto mixer = videoMixer_) {
1692 0 : std::vector<std::string> videoInputs;
1693 0 : for (const auto& source : hostSources_) {
1694 0 : if (source.type_ == MediaType::MEDIA_VIDEO)
1695 0 : videoInputs.emplace_back(source.sourceUri_);
1696 : }
1697 0 : mixer->switchInputs(videoInputs);
1698 0 : }
1699 : }
1700 0 : emitSignal<libjami::CallSignal::VideoMuted>(id_, is_muted);
1701 0 : return;
1702 : #endif
1703 : }
1704 : }
1705 :
1706 : #ifdef ENABLE_VIDEO
1707 : void
1708 0 : Conference::resizeRemoteParticipants(ConfInfo& confInfo, std::string_view peerURI)
1709 : {
1710 0 : int remoteFrameHeight = confInfo.h;
1711 0 : int remoteFrameWidth = confInfo.w;
1712 :
1713 0 : if (remoteFrameHeight == 0 or remoteFrameWidth == 0) {
1714 : // get the size of the remote frame from receiveThread
1715 : // if the one from confInfo is empty
1716 0 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCallFromPeerID(string_remove_suffix(peerURI, '@')))) {
1717 0 : for (auto const& videoRtp : call->getRtpSessionList(MediaType::MEDIA_VIDEO)) {
1718 0 : auto recv = std::static_pointer_cast<video::VideoRtpSession>(videoRtp)->getVideoReceive();
1719 0 : remoteFrameHeight = recv->getHeight();
1720 0 : remoteFrameWidth = recv->getWidth();
1721 : // NOTE: this may be not the behavior we want, but this is only called
1722 : // when we receive conferences information from a call, so the peer is
1723 : // mixing the video and send only one stream, so we can break here
1724 0 : break;
1725 0 : }
1726 0 : }
1727 : }
1728 :
1729 0 : if (remoteFrameHeight == 0 or remoteFrameWidth == 0) {
1730 0 : JAMI_WARNING("[conf:{}] Remote frame size not found", id_);
1731 0 : return;
1732 : }
1733 :
1734 : // get the size of the local frame
1735 0 : ParticipantInfo localCell;
1736 0 : for (const auto& p : confInfo_) {
1737 0 : if (p.uri == peerURI) {
1738 0 : localCell = p;
1739 0 : break;
1740 : }
1741 : }
1742 :
1743 0 : const float zoomX = (float) remoteFrameWidth / localCell.w;
1744 0 : const float zoomY = (float) remoteFrameHeight / localCell.h;
1745 : // Do the resize for each remote participant
1746 0 : for (auto& remoteCell : confInfo) {
1747 0 : remoteCell.x = remoteCell.x / zoomX + localCell.x;
1748 0 : remoteCell.y = remoteCell.y / zoomY + localCell.y;
1749 0 : remoteCell.w = remoteCell.w / zoomX;
1750 0 : remoteCell.h = remoteCell.h / zoomY;
1751 : }
1752 0 : }
1753 : #endif
1754 :
1755 : void
1756 0 : Conference::mergeConfInfo(ConfInfo& newInfo, const std::string& peerURI)
1757 : {
1758 0 : JAMI_DEBUG("[conf:{}] Merging confInfo from {}", id_, peerURI);
1759 0 : if (newInfo.empty()) {
1760 0 : JAMI_DEBUG("[conf:{}] confInfo empty, removing remoteHost {}", id_, peerURI);
1761 0 : std::lock_guard lk(confInfoMutex_);
1762 0 : remoteHosts_.erase(peerURI);
1763 0 : sendConferenceInfos();
1764 0 : return;
1765 0 : }
1766 :
1767 : #ifdef ENABLE_VIDEO
1768 0 : resizeRemoteParticipants(newInfo, peerURI);
1769 : #endif
1770 :
1771 0 : std::lock_guard lk(confInfoMutex_);
1772 0 : bool updateNeeded = false;
1773 0 : auto it = remoteHosts_.find(peerURI);
1774 0 : if (it != remoteHosts_.end()) {
1775 : // Compare confInfo before update
1776 0 : if (it->second != newInfo) {
1777 0 : it->second = newInfo;
1778 0 : updateNeeded = true;
1779 : }
1780 : } else {
1781 0 : remoteHosts_.emplace(peerURI, newInfo);
1782 0 : updateNeeded = true;
1783 : }
1784 : // Send confInfo only if needed to avoid loops
1785 : #ifdef ENABLE_VIDEO
1786 0 : if (updateNeeded and videoMixer_) {
1787 : // Trigger the layout update in the mixer because the frame resolution may
1788 : // change from participant to conference and cause a mismatch between
1789 : // confInfo layout and rendering layout.
1790 0 : videoMixer_->updateLayout();
1791 : }
1792 : #endif
1793 0 : if (updateNeeded)
1794 0 : sendConferenceInfos();
1795 0 : }
1796 :
1797 : std::string_view
1798 0 : Conference::findHostforRemoteParticipant(std::string_view uri, std::string_view deviceId)
1799 : {
1800 0 : for (const auto& host : remoteHosts_) {
1801 0 : for (const auto& p : host.second) {
1802 0 : if (uri == string_remove_suffix(p.uri, '@') && (deviceId == "" || deviceId == p.device))
1803 0 : return host.first;
1804 : }
1805 : }
1806 0 : return "";
1807 : }
1808 :
1809 : std::shared_ptr<Call>
1810 0 : Conference::getCallFromPeerID(std::string_view peerID)
1811 : {
1812 0 : for (const auto& p : getSubCalls()) {
1813 0 : auto call = getCall(p);
1814 0 : if (call && getRemoteId(call) == peerID) {
1815 0 : return call;
1816 : }
1817 0 : }
1818 0 : return nullptr;
1819 : }
1820 :
1821 : std::shared_ptr<Call>
1822 23 : Conference::getCallWith(const std::string& accountUri, const std::string& deviceId)
1823 : {
1824 36 : for (const auto& p : getSubCalls()) {
1825 72 : if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(p))) {
1826 36 : auto* transport = call->getTransport();
1827 59 : if (accountUri == string_remove_suffix(call->getPeerNumber(), '@') && transport
1828 59 : && deviceId == transport->deviceId()) {
1829 23 : return call;
1830 : }
1831 36 : }
1832 23 : }
1833 0 : return {};
1834 : }
1835 :
1836 : std::string
1837 141 : Conference::getRemoteId(const std::shared_ptr<jami::Call>& call) const
1838 : {
1839 141 : if (auto* transport = std::dynamic_pointer_cast<SIPCall>(call)->getTransport())
1840 141 : if (auto cert = transport->getTlsInfos().peerCert)
1841 139 : if (cert->issuer)
1842 141 : return cert->issuer->getId().toString();
1843 2 : return {};
1844 : }
1845 :
1846 : void
1847 1 : Conference::stopRecording()
1848 : {
1849 1 : Recordable::stopRecording();
1850 1 : updateRecording();
1851 1 : }
1852 :
1853 : bool
1854 1 : Conference::startRecording(const std::string& path)
1855 : {
1856 1 : auto res = Recordable::startRecording(path);
1857 1 : updateRecording();
1858 1 : return res;
1859 : }
1860 :
1861 : /// PRIVATE
1862 :
1863 : void
1864 34 : Conference::bindHostAudio()
1865 : {
1866 136 : JAMI_DEBUG("[conf:{}] Binding host audio", id_);
1867 :
1868 34 : auto& rbPool = Manager::instance().getRingBufferPool();
1869 :
1870 : // Collect and start host audio sources, separating primary from secondary.
1871 : // The primary host buffer (DEFAULT_ID) forms the bidirectional link with
1872 : // each subcall's primary stream. Secondary host buffers are added as
1873 : // half-duplex sources so that participants hear the mix of all host streams.
1874 34 : std::string hostPrimaryBuffer;
1875 34 : std::vector<std::string> hostSecondaryBuffers;
1876 :
1877 98 : for (const auto& source : hostSources_) {
1878 64 : if (source.type_ != MediaType::MEDIA_AUDIO)
1879 30 : continue;
1880 :
1881 : // Start audio input
1882 34 : auto& hostAudioInput = hostAudioInputs_[source.label_];
1883 34 : if (!hostAudioInput)
1884 0 : hostAudioInput = std::make_shared<AudioInput>(source.label_);
1885 34 : hostAudioInput->switchInput(source.sourceUri_);
1886 :
1887 34 : if (source.label_ == sip_utils::DEFAULT_AUDIO_STREAMID) {
1888 34 : hostPrimaryBuffer = std::string(RingBufferPool::DEFAULT_ID);
1889 136 : JAMI_DEBUG("[conf:{}] Primary host buffer: {}", id_, hostPrimaryBuffer);
1890 : } else {
1891 : // Use the ring buffer ID that initCapture/initFile actually
1892 : // created, not the raw sourceUri which may differ (e.g.
1893 : // "display://:0+0,0 1920x1080" vs the normalized "desktop").
1894 0 : auto bufferId = hostAudioInput->getSourceRingBufferId();
1895 0 : if (!bufferId.empty()) {
1896 0 : if (source.muted_) {
1897 : // Muted secondary source: silence the AudioInput and
1898 : // remove its buffer from the mix so participants no
1899 : // longer receive data from it.
1900 0 : JAMI_DEBUG("[conf:{}] Secondary host buffer {} is muted – unbinding", id_, bufferId);
1901 0 : hostAudioInput->setMuted(true);
1902 0 : rbPool.unBindAllHalfDuplexIn(bufferId);
1903 : } else {
1904 0 : JAMI_DEBUG("[conf:{}] Secondary host buffer: {}", id_, bufferId);
1905 0 : hostAudioInput->setMuted(false);
1906 0 : hostSecondaryBuffers.push_back(std::move(bufferId));
1907 : }
1908 : } else {
1909 0 : JAMI_WARNING("[conf:{}] No source ring buffer for host audio {}", id_, source.label_);
1910 : }
1911 0 : }
1912 : }
1913 :
1914 34 : if (hostPrimaryBuffer.empty())
1915 0 : return;
1916 :
1917 38 : for (const auto& item : getSubCalls()) {
1918 4 : auto call = getCall(item);
1919 4 : if (!call)
1920 0 : continue;
1921 :
1922 4 : const bool participantMuted = isMuted(call->getCallId());
1923 4 : const auto medias = call->getRemoteAudioStreams();
1924 :
1925 : // Identify participant's primary (first) and secondary audio streams.
1926 : // Only the primary stream receives the conference mix (bidirectional).
1927 : // Secondary streams are mixed in as sources for other participants.
1928 4 : std::string participantPrimary;
1929 4 : std::vector<std::string> participantSecondaries;
1930 8 : for (const auto& [id, muted] : medias) {
1931 4 : if (participantPrimary.empty())
1932 4 : participantPrimary = id;
1933 : else
1934 0 : participantSecondaries.push_back(id);
1935 : }
1936 :
1937 4 : if (participantPrimary.empty())
1938 0 : continue;
1939 :
1940 4 : const bool primaryMuted = medias.at(participantPrimary);
1941 4 : const bool participantCanSend = !(participantMuted || primaryMuted);
1942 :
1943 : // Host primary <-> participant primary (bidirectional with mute logic)
1944 4 : if (participantCanSend)
1945 4 : rbPool.bindRingBuffers(participantPrimary, hostPrimaryBuffer);
1946 : else
1947 0 : rbPool.bindHalfDuplexOut(participantPrimary, hostPrimaryBuffer);
1948 :
1949 : // Host secondary sources -> participant primary
1950 : // (participant hears all host audio streams mixed together)
1951 4 : for (const auto& secBuffer : hostSecondaryBuffers)
1952 0 : rbPool.bindHalfDuplexOut(participantPrimary, secBuffer);
1953 :
1954 : // Participant secondary streams -> host primary
1955 : // (host hears all participant audio streams mixed together)
1956 4 : for (const auto& secId : participantSecondaries) {
1957 0 : const bool secMuted = medias.at(secId);
1958 0 : if (!(participantMuted || secMuted))
1959 0 : rbPool.bindHalfDuplexOut(hostPrimaryBuffer, secId);
1960 : }
1961 :
1962 4 : rbPool.flush(participantPrimary);
1963 4 : for (const auto& secId : participantSecondaries)
1964 0 : rbPool.flush(secId);
1965 38 : }
1966 :
1967 34 : rbPool.flush(hostPrimaryBuffer);
1968 34 : for (const auto& secBuffer : hostSecondaryBuffers)
1969 0 : rbPool.flush(secBuffer);
1970 34 : }
1971 :
1972 : void
1973 27 : Conference::unbindHostAudio()
1974 : {
1975 108 : JAMI_DEBUG("[conf:{}] Unbinding host audio", id_);
1976 27 : auto& rbPool = Manager::instance().getRingBufferPool();
1977 :
1978 76 : for (const auto& source : hostSources_) {
1979 49 : if (source.type_ != MediaType::MEDIA_AUDIO)
1980 22 : continue;
1981 :
1982 : // Determine the buffer ID to unbind before stopping the input,
1983 : // since switchInput("") resets the source ring buffer ID.
1984 27 : std::string bufferId;
1985 27 : auto hostAudioInput = hostAudioInputs_.find(source.label_);
1986 27 : if (hostAudioInput != hostAudioInputs_.end() && hostAudioInput->second) {
1987 27 : if (source.label_ == sip_utils::DEFAULT_AUDIO_STREAMID)
1988 27 : bufferId = std::string(RingBufferPool::DEFAULT_ID);
1989 : else
1990 0 : bufferId = hostAudioInput->second->getSourceRingBufferId();
1991 : // Stop audio input
1992 27 : hostAudioInput->second->switchInput("");
1993 : }
1994 :
1995 : // Unbind audio: remove this buffer as a source from all readers.
1996 27 : if (!bufferId.empty())
1997 27 : rbPool.unBindAllHalfDuplexIn(bufferId);
1998 27 : }
1999 27 : }
2000 :
2001 : void
2002 72 : Conference::bindSubCallAudio(const std::string& callId)
2003 : {
2004 72 : auto& rbPool = Manager::instance().getRingBufferPool();
2005 :
2006 72 : auto participantCall = getCall(callId);
2007 72 : if (!participantCall)
2008 1 : return;
2009 :
2010 71 : const bool participantMuted = isMuted(callId);
2011 71 : const auto participantStreams = participantCall->getRemoteAudioStreams();
2012 284 : JAMI_DEBUG("[conf:{}] Binding participant audio: {} with {} streams", id_, callId, participantStreams.size());
2013 :
2014 : // Identify participant's primary (first) and secondary audio streams.
2015 : // The primary stream forms the bidirectional link with other participants'
2016 : // primary streams and the host. Secondary streams are mixed in as
2017 : // half-duplex sources so that other participants (and the host) hear the
2018 : // combined audio from all of this participant's streams.
2019 71 : std::string primaryStreamId;
2020 71 : std::vector<std::string> secondaryStreamIds;
2021 142 : for (const auto& [streamId, muted] : participantStreams) {
2022 71 : if (primaryStreamId.empty())
2023 71 : primaryStreamId = streamId;
2024 : else
2025 0 : secondaryStreamIds.push_back(streamId);
2026 : }
2027 :
2028 71 : if (primaryStreamId.empty())
2029 0 : return;
2030 :
2031 71 : const bool primaryMuted = participantStreams.at(primaryStreamId);
2032 71 : const bool participantPrimaryCanSend = !(participantMuted || primaryMuted);
2033 :
2034 : // --- Bind with other subcalls ---
2035 194 : for (const auto& otherId : getSubCalls()) {
2036 123 : if (otherId == callId)
2037 71 : continue;
2038 :
2039 52 : auto otherCall = getCall(otherId);
2040 52 : if (!otherCall)
2041 0 : continue;
2042 :
2043 52 : const bool otherMuted = isMuted(otherId);
2044 52 : const auto otherStreams = otherCall->getRemoteAudioStreams();
2045 :
2046 : // Identify the other participant's primary and secondary streams
2047 52 : std::string otherPrimaryId;
2048 52 : std::vector<std::string> otherSecondaryIds;
2049 104 : for (const auto& [streamId, muted] : otherStreams) {
2050 52 : if (otherPrimaryId.empty())
2051 52 : otherPrimaryId = streamId;
2052 : else
2053 0 : otherSecondaryIds.push_back(streamId);
2054 : }
2055 :
2056 52 : if (otherPrimaryId.empty())
2057 0 : continue;
2058 :
2059 52 : const bool otherPrimaryMuted = otherStreams.at(otherPrimaryId);
2060 52 : const bool otherPrimaryCanSend = !(otherMuted || otherPrimaryMuted);
2061 :
2062 : // Primary <-> primary (bidirectional with mute logic)
2063 52 : if (participantPrimaryCanSend && otherPrimaryCanSend) {
2064 50 : rbPool.bindRingBuffers(primaryStreamId, otherPrimaryId);
2065 : } else {
2066 2 : if (participantPrimaryCanSend)
2067 0 : rbPool.bindHalfDuplexOut(otherPrimaryId, primaryStreamId);
2068 2 : if (otherPrimaryCanSend)
2069 2 : rbPool.bindHalfDuplexOut(primaryStreamId, otherPrimaryId);
2070 : }
2071 :
2072 : // Participant's secondaries -> other's primary
2073 : // (other participant hears all of this participant's streams mixed)
2074 52 : for (const auto& secId : secondaryStreamIds) {
2075 0 : const bool secMuted = participantStreams.at(secId);
2076 0 : if (!(participantMuted || secMuted))
2077 0 : rbPool.bindHalfDuplexOut(otherPrimaryId, secId);
2078 : }
2079 :
2080 : // Other's secondaries -> participant's primary
2081 : // (this participant hears all of the other's streams mixed)
2082 52 : for (const auto& otherSecId : otherSecondaryIds) {
2083 0 : const bool otherSecMuted = otherStreams.at(otherSecId);
2084 0 : if (!(otherMuted || otherSecMuted))
2085 0 : rbPool.bindHalfDuplexOut(primaryStreamId, otherSecId);
2086 : }
2087 :
2088 52 : rbPool.flush(primaryStreamId);
2089 52 : rbPool.flush(otherPrimaryId);
2090 123 : }
2091 :
2092 : // --- Bind with host (if attached) ---
2093 71 : if (getState() == State::ACTIVE_ATTACHED) {
2094 67 : const bool hostCanSend = !(isMuted("host"sv) || isMediaSourceMuted(MediaType::MEDIA_AUDIO));
2095 :
2096 : // Primary <-> host default buffer (bidirectional with mute logic)
2097 67 : if (participantPrimaryCanSend && hostCanSend) {
2098 61 : rbPool.bindRingBuffers(primaryStreamId, RingBufferPool::DEFAULT_ID);
2099 : } else {
2100 6 : if (participantPrimaryCanSend)
2101 3 : rbPool.bindHalfDuplexOut(RingBufferPool::DEFAULT_ID, primaryStreamId);
2102 6 : if (hostCanSend)
2103 0 : rbPool.bindHalfDuplexOut(primaryStreamId, RingBufferPool::DEFAULT_ID);
2104 : }
2105 :
2106 : // Participant's secondaries -> host
2107 : // (host hears all of this participant's streams mixed)
2108 67 : for (const auto& secId : secondaryStreamIds) {
2109 0 : const bool secMuted = participantStreams.at(secId);
2110 0 : if (!(participantMuted || secMuted))
2111 0 : rbPool.bindHalfDuplexOut(RingBufferPool::DEFAULT_ID, secId);
2112 : }
2113 :
2114 : // Host's secondary sources -> participant primary
2115 : // (participant hears all host audio sources mixed)
2116 189 : for (const auto& source : hostSources_) {
2117 122 : if (source.type_ == MediaType::MEDIA_AUDIO && source.label_ != sip_utils::DEFAULT_AUDIO_STREAMID) {
2118 0 : auto it = hostAudioInputs_.find(source.label_);
2119 0 : if (it != hostAudioInputs_.end() && it->second) {
2120 0 : auto buffer = it->second->getSourceRingBufferId();
2121 0 : if (!buffer.empty())
2122 0 : rbPool.bindHalfDuplexOut(primaryStreamId, buffer);
2123 0 : }
2124 : }
2125 : }
2126 :
2127 67 : rbPool.flush(primaryStreamId);
2128 67 : rbPool.flush(RingBufferPool::DEFAULT_ID);
2129 : }
2130 :
2131 : // Flush secondary streams
2132 71 : for (const auto& secId : secondaryStreamIds)
2133 0 : rbPool.flush(secId);
2134 72 : }
2135 :
2136 : void
2137 68 : Conference::unbindSubCallAudio(const std::string& callId)
2138 : {
2139 272 : JAMI_DEBUG("[conf:{}] Unbinding participant audio: {}", id_, callId);
2140 68 : if (auto call = getCall(callId)) {
2141 68 : auto medias = call->getAudioStreams();
2142 68 : auto& rbPool = Manager::instance().getRingBufferPool();
2143 :
2144 68 : bool isPrimary = true;
2145 136 : for (const auto& [id, muted] : medias) {
2146 : // Remove this stream as a source from all readers.
2147 68 : rbPool.unBindAllHalfDuplexIn(id);
2148 : // For the primary stream, also remove its reader bindings
2149 : // (it was the only stream receiving the conference mix).
2150 68 : if (isPrimary) {
2151 68 : rbPool.unBindAllHalfDuplexOut(id);
2152 68 : isPrimary = false;
2153 : }
2154 : }
2155 136 : }
2156 68 : }
2157 :
2158 : void
2159 64 : Conference::clearParticipantData(const std::string& callId)
2160 : {
2161 256 : JAMI_DEBUG("[conf:{}] Clearing participant data for call {}", id_, callId);
2162 :
2163 64 : if (callId.empty()) {
2164 0 : JAMI_WARNING("[conf:{}] Cannot clear participant data: empty call id", id_);
2165 0 : return;
2166 : }
2167 :
2168 64 : auto call = std::dynamic_pointer_cast<SIPCall>(getCall(callId));
2169 64 : if (!call) {
2170 0 : JAMI_WARNING("[conf:{}] Unable to find call {} to clear participant", id_, callId);
2171 0 : return;
2172 : }
2173 :
2174 64 : auto* transport = call->getTransport();
2175 64 : if (!transport) {
2176 0 : JAMI_WARNING("[conf:{}] Unable to find transport for call {} to clear participant", id_, callId);
2177 0 : return;
2178 : }
2179 :
2180 64 : const std::string deviceId = std::string(transport->deviceId());
2181 64 : const std::string participantId = getRemoteId(call);
2182 :
2183 : {
2184 64 : std::lock_guard lk(confInfoMutex_);
2185 218 : for (auto it = confInfo_.begin(); it != confInfo_.end();) {
2186 154 : if (it->uri == participantId) {
2187 52 : it = confInfo_.erase(it);
2188 : } else {
2189 102 : ++it;
2190 : }
2191 : }
2192 64 : auto remoteIt = remoteHosts_.find(participantId);
2193 64 : if (remoteIt != remoteHosts_.end()) {
2194 0 : remoteHosts_.erase(remoteIt);
2195 : }
2196 64 : handsRaised_.erase(deviceId);
2197 64 : moderators_.erase(participantId);
2198 64 : participantsMuted_.erase(callId);
2199 64 : }
2200 :
2201 64 : sendConferenceInfos();
2202 64 : }
2203 :
2204 : } // namespace jami
|