From 553bd421ff931baa8ef3704b05ce860bb354699e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?S=C3=A9bastien=20Blin?=
 <sebastien.blin@savoirfairelinux.com>
Date: Thu, 9 Jun 2022 11:20:24 -0400
Subject: [PATCH] conferences: support multistream

In a conference, with this patch, the user is able to share
a media while keeping the camera. The mute button for the camera
only controls the camera, and we use requestMediaChange to add media.
To stop sharing a media, the user will click on the dedicated button.
If the peer is not compatible with multistream, the behavior will
be the same as before.

https://git.jami.net/savoirfairelinux/jami-project/-/issues/1429
Change-Id: If5e0634be6d0d06900ad82554b32a524fd4db36c
---
 src/app/avadapter.cpp                         | 119 ++++++---
 src/app/avadapter.h                           |  15 ++
 src/app/calladapter.cpp                       |  94 ++++---
 src/app/calladapter.h                         |   4 +-
 src/app/constant/JamiStrings.qml              |   4 +-
 src/app/lrcinstance.cpp                       |   4 +-
 src/app/lrcinstance.h                         |   4 +-
 src/app/mainview/components/CallActionBar.qml |   6 +-
 src/app/mainview/components/CallOverlay.qml   |   6 +-
 .../components/CallViewContextMenu.qml        |  11 +-
 .../mainview/components/OngoingCallPage.qml   |  22 +-
 src/libclient/api/call.h                      |  11 +-
 src/libclient/api/callparticipantsmodel.h     |   7 +-
 src/libclient/api/newcallmodel.h              |  47 ++--
 src/libclient/callparticipantsmodel.cpp       |  21 +-
 src/libclient/newcallmodel.cpp                | 241 ++++++++++--------
 src/libclient/qtwrapper/callmanager_wrap.h    |  44 +++-
 src/libclient/qtwrapper/conversions_wrap.hpp  |   3 -
 src/libclient/typedefs.h                      |  33 ++-
 19 files changed, 437 insertions(+), 259 deletions(-)

diff --git a/src/app/avadapter.cpp b/src/app/avadapter.cpp
index c46836e9f..5ba52199a 100644
--- a/src/app/avadapter.cpp
+++ b/src/app/avadapter.cpp
@@ -33,6 +33,8 @@
 #include <QPainter>
 #include <QScreen>
 
+#include <media_const.h>
+
 AvAdapter::AvAdapter(LRCInstance* instance, QObject* parent)
     : QmlAdapterBase(instance, parent)
 {
@@ -94,12 +96,10 @@ AvAdapter::shareEntireScreen(int screenNumber)
                                      rect.width() * screen->devicePixelRatio(),
                                      rect.height() * screen->devicePixelRatio());
     auto callId = lrcInstance_->getCurrentCallId();
+    if (hasCamera())
+        muteCamera_ = !isCapturing();
     lrcInstance_->getCurrentCallModel()
-        ->requestMediaChange(callId,
-                             "video_0",
-                             resource,
-                             lrc::api::NewCallModel::MediaRequestType::SCREENSHARING,
-                             false);
+        ->addMedia(callId, resource, lrc::api::NewCallModel::MediaRequestType::SCREENSHARING);
 }
 
 void
@@ -113,12 +113,10 @@ AvAdapter::shareAllScreens()
                                                                     arrangementRect.width(),
                                                                     arrangementRect.height());
     auto callId = lrcInstance_->getCurrentCallId();
+    if (hasCamera())
+        muteCamera_ = !isCapturing();
     lrcInstance_->getCurrentCallModel()
-        ->requestMediaChange(callId,
-                             "video_0",
-                             resource,
-                             lrc::api::NewCallModel::MediaRequestType::SCREENSHARING,
-                             false);
+        ->addMedia(callId, resource, lrc::api::NewCallModel::MediaRequestType::SCREENSHARING);
 }
 
 void
@@ -180,18 +178,18 @@ AvAdapter::shareFile(const QString& filePath)
 {
     auto callId = lrcInstance_->getCurrentCallId();
     if (!callId.isEmpty()) {
+        if (hasCamera())
+            muteCamera_ = !isCapturing();
         lrcInstance_->getCurrentCallModel()
-            ->requestMediaChange(callId,
-                                 "video_0",
-                                 filePath,
-                                 lrc::api::NewCallModel::MediaRequestType::FILESHARING,
-                                 false);
+            ->addMedia(callId, filePath, lrc::api::NewCallModel::MediaRequestType::FILESHARING);
     }
 }
 
 void
 AvAdapter::shareScreenArea(unsigned x, unsigned y, unsigned width, unsigned height)
 {
+    if (hasCamera())
+        muteCamera_ = !isCapturing();
 #ifdef Q_OS_LINUX
     // xrectsel will freeze all displays too fast so that the call
     // context menu will not be closed even closed signal is emitted
@@ -206,11 +204,7 @@ AvAdapter::shareScreenArea(unsigned x, unsigned y, unsigned width, unsigned heig
                                                                         height < 128 ? 128 : height);
         auto callId = lrcInstance_->getCurrentCallId();
         lrcInstance_->getCurrentCallModel()
-            ->requestMediaChange(callId,
-                                 "video_0",
-                                 resource,
-                                 lrc::api::NewCallModel::MediaRequestType::SCREENSHARING,
-                                 false);
+            ->addMedia(callId, resource, lrc::api::NewCallModel::MediaRequestType::SCREENSHARING);
     });
 #else
     auto resource = lrcInstance_->getCurrentCallModel()->getDisplay(getScreenNumber(),
@@ -220,11 +214,7 @@ AvAdapter::shareScreenArea(unsigned x, unsigned y, unsigned width, unsigned heig
                                                                     height < 128 ? 128 : height);
     auto callId = lrcInstance_->getCurrentCallId();
     lrcInstance_->getCurrentCallModel()
-        ->requestMediaChange(callId,
-                             "video_0",
-                             resource,
-                             lrc::api::NewCallModel::MediaRequestType::SCREENSHARING,
-                             false);
+        ->addMedia(callId, resource, lrc::api::NewCallModel::MediaRequestType::SCREENSHARING);
 #endif
 }
 
@@ -233,12 +223,11 @@ AvAdapter::shareWindow(const QString& windowId)
 {
     auto resource = lrcInstance_->getCurrentCallModel()->getDisplay(windowId);
     auto callId = lrcInstance_->getCurrentCallId();
+
+    if (hasCamera())
+        muteCamera_ = !isCapturing();
     lrcInstance_->getCurrentCallModel()
-        ->requestMediaChange(callId,
-                             "video_0",
-                             resource,
-                             lrc::api::NewCallModel::MediaRequestType::SCREENSHARING,
-                             false);
+        ->addMedia(callId, resource, lrc::api::NewCallModel::MediaRequestType::SCREENSHARING);
 }
 
 QString
@@ -285,12 +274,10 @@ AvAdapter::stopSharing()
 {
     auto callId = lrcInstance_->getCurrentCallId();
     if (!callId.isEmpty()) {
-        lrcInstance_->getCurrentCallModel()
-            ->requestMediaChange(callId,
-                                 "video_0",
-                                 lrcInstance_->avModel().getCurrentVideoCaptureDevice(),
-                                 lrc::api::NewCallModel::MediaRequestType::CAMERA,
-                                 muteCamera_);
+        lrcInstance_->getCurrentCallModel()->removeMedia(callId,
+                                                         DRing::Media::Details::MEDIA_TYPE_VIDEO,
+                                                         DRing::Media::VideoProtocolPrefix::DISPLAY,
+                                                         muteCamera_);
     }
 }
 
@@ -321,10 +308,70 @@ AvAdapter::onRendererStarted(const QString& id)
     auto callId = lrcInstance_->getCurrentCallId();
     auto callModel = lrcInstance_->getCurrentCallModel();
     auto renderDevice = callModel->getCurrentRenderedDevice(callId);
+    if (!id.contains("://"))
+        return;
     set_currentRenderingDeviceId(id);
     set_currentRenderingDeviceType(renderDevice.type);
 }
 
+bool
+AvAdapter::isSharing() const
+{
+    try {
+        auto callId = lrcInstance_->getCurrentCallId();
+        auto callModel = lrcInstance_->getCurrentCallModel();
+        auto call = callModel->getCall(callId);
+        // TODO enum
+        return call.hasMediaWithType(DRing::Media::VideoProtocolPrefix::DISPLAY,
+                                     DRing::Media::Details::MEDIA_TYPE_VIDEO)
+               || call.hasMediaWithType("file:", DRing::Media::Details::MEDIA_TYPE_VIDEO);
+    } catch (...) {
+    }
+    return false;
+}
+
+bool
+AvAdapter::isCapturing() const
+{
+    try {
+        auto callId = lrcInstance_->getCurrentCallId();
+        auto callModel = lrcInstance_->getCurrentCallModel();
+        auto call = callModel->getCall(callId);
+        // TODO enum
+        for (const auto& m : call.mediaList) {
+            if (m[DRing::Media::MediaAttributeKey::SOURCE].startsWith(
+                    DRing::Media::VideoProtocolPrefix::CAMERA)
+                && m[DRing::Media::MediaAttributeKey::MEDIA_TYPE]
+                       == DRing::Media::Details::MEDIA_TYPE_VIDEO)
+                return m[DRing::Media::MediaAttributeKey::MUTED] == FALSE_STR;
+        }
+        return false;
+    } catch (...) {
+    }
+    return false;
+}
+
+bool
+AvAdapter::hasCamera() const
+{
+    try {
+        auto callId = lrcInstance_->getCurrentCallId();
+        auto callModel = lrcInstance_->getCurrentCallModel();
+        auto call = callModel->getCall(callId);
+        // TODO enum
+        for (const auto& m : call.mediaList) {
+            if (m[DRing::Media::MediaAttributeKey::SOURCE].startsWith(
+                    DRing::Media::VideoProtocolPrefix::CAMERA)
+                && m[DRing::Media::MediaAttributeKey::MEDIA_TYPE]
+                       == DRing::Media::Details::MEDIA_TYPE_VIDEO)
+                return true;
+        }
+        return false;
+    } catch (...) {
+    }
+    return false;
+}
+
 int
 AvAdapter::getScreenNumber() const
 {
diff --git a/src/app/avadapter.h b/src/app/avadapter.h
index a21dfad3f..793a6f438 100644
--- a/src/app/avadapter.h
+++ b/src/app/avadapter.h
@@ -49,6 +49,21 @@ Q_SIGNALS:
 protected:
     void safeInit() override {};
 
+    /**
+     * Check if user is sharing a media
+     */
+    Q_INVOKABLE bool isSharing() const;
+
+    /**
+     * Check if user is showing a camera
+     */
+    Q_INVOKABLE bool isCapturing() const;
+
+    /**
+     * Check if user has a camera (even muted)
+     */
+    Q_INVOKABLE bool hasCamera() const;
+
     // Share the screen specificed by screen number.
     Q_INVOKABLE void shareEntireScreen(int screenNumber);
 
diff --git a/src/app/calladapter.cpp b/src/app/calladapter.cpp
index c92a238f0..245f9db92 100644
--- a/src/app/calladapter.cpp
+++ b/src/app/calladapter.cpp
@@ -34,6 +34,8 @@
 
 #include <api/callparticipantsmodel.h>
 
+#include <media_const.h>
+
 CallAdapter::CallAdapter(SystemTray* systemTray, LRCInstance* instance, QObject* parent)
     : QmlAdapterBase(instance, parent)
     , systemTray_(systemTray)
@@ -119,7 +121,7 @@ CallAdapter::onCallStatusChanged(const QString& accountId, const QString& callId
     const auto call = callModel->getCall(callId);
 
     const auto& convInfo = lrcInstance_->getConversationFromCallId(callId, accountId);
-    if (convInfo.uid.isEmpty())
+    if (convInfo.uid.isEmpty() || call.isOutgoing)
         return;
 
     // handle notifications
@@ -312,6 +314,10 @@ CallAdapter::onCallInfosChanged(const QString& accountId, const QString& callId)
          */
         const auto& convInfo = lrcInstance_->getConversationFromCallId(callId);
         if (!convInfo.uid.isEmpty()) {
+            if (!convInfo.confId.isEmpty() && callId != convInfo.confId) {
+                // In this case the conv has a confId, ignore subcalls changes.
+                return;
+            }
             Q_EMIT callInfosChanged(call.isAudioOnly, accountId, convInfo.uid);
             participantsModel_->setConferenceLayout(static_cast<int>(call.layout), callId);
             updateCallOverlay(convInfo);
@@ -461,7 +467,7 @@ CallAdapter::onShowIncomingCallView(const QString& accountId, const QString& con
         auto currentCall = accInfo.callModel->getCall(currentConvInfo.callId);
         if ((currentCall.status == call::Status::CONNECTED
              || currentCall.status == call::Status::IN_PROGRESS)
-            && !accountProperties.autoAnswer) {
+            && !accountProperties.autoAnswer && !currentCall.isOutgoing) {
             showNotification(accountId, convInfo.uid);
             return;
         }
@@ -673,27 +679,35 @@ CallAdapter::sipInputPanelPlayDTMF(const QString& key)
 void
 CallAdapter::updateCallOverlay(const lrc::api::conversation::Info& convInfo)
 {
+    qWarning() << "CallAdapter::updateCallOverlay";
     auto& accInfo = lrcInstance_->accountModel().getAccountInfo(accountId_);
     auto* callModel = accInfo.callModel.get();
 
-    auto* call = lrcInstance_->getCallInfoForConversation(convInfo);
-    if (!call) {
+    const auto* callInfo = lrcInstance_->getCallInfoForConversation(convInfo);
+    const auto currentCallId = lrcInstance_->getCurrentCallId();
+    if (!callInfo || callInfo->id != currentCallId)
         return;
-    }
 
-    bool isPaused = call->status == lrc::api::call::Status::PAUSED;
-    bool isAudioOnly = call->isAudioOnly && !isPaused;
-    bool isAudioMuted = call->audioMuted && (call->status != lrc::api::call::Status::PAUSED);
-    bool isVideoMuted = call->isAudioOnly || (call->videoMuted && !isPaused);
-    bool isGrid = call->layout == lrc::api::call::Layout::GRID;
+    bool isPaused = callInfo->status == lrc::api::call::Status::PAUSED;
+    bool isAudioOnly = callInfo->isAudioOnly && !isPaused;
+    bool isAudioMuted = callInfo->status == lrc::api::call::Status::PAUSED;
+    bool isGrid = callInfo->layout == lrc::api::call::Layout::GRID;
     QString previewId {};
-    if (!isAudioOnly && !isVideoMuted && call->status == lrc::api::call::Status::IN_PROGRESS) {
-        for (const auto& media : call->mediaList) {
-            if (media["MEDIA_TYPE"] == "MEDIA_TYPE_VIDEO") {
-                if (media["ENABLED"] == "true" && media["MUTED"] == "false") {
-                    previewId = media["SOURCE"];
-                    break;
+    bool isVideoMuted = false;
+    if (callInfo->status != lrc::api::call::Status::ENDED) {
+        for (const auto& media : callInfo->mediaList) {
+            if (media[DRing::Media::MediaAttributeKey::MEDIA_TYPE]
+                == DRing::Media::Details::MEDIA_TYPE_VIDEO) {
+                if (media[DRing::Media::MediaAttributeKey::ENABLED] == TRUE_STR
+                    && media[DRing::Media::MediaAttributeKey::MUTED] == FALSE_STR) {
+                    if (previewId.isEmpty()) {
+                        previewId = media[DRing::Media::MediaAttributeKey::SOURCE];
+                    }
+                    isVideoMuted |= media[DRing::Media::MediaAttributeKey::SOURCE].startsWith(
+                        DRing::Media::VideoProtocolPrefix::CAMERA);
                 }
+            } else if (media[DRing::Media::MediaAttributeKey::LABEL] == "audio_0") {
+                isAudioMuted |= media[DRing::Media::MediaAttributeKey::MUTED] == TRUE_STR;
             }
         }
     }
@@ -795,7 +809,8 @@ CallAdapter::minimizeParticipant(const QString& uri)
                     && call.layout == lrc::api::call::Layout::ONE_WITH_SMALL) {
                     auto deviceId = participant[lrc::api::ParticipantsInfosStrings::DEVICE]
                                         .toString();
-                    auto streamId = participant[lrc::api::ParticipantsInfosStrings::STREAMID].toString();
+                    auto streamId = participant[lrc::api::ParticipantsInfosStrings::STREAMID]
+                                        .toString();
                     callModel->setActiveStream(confId, uri, deviceId, streamId, false);
                 }
             }
@@ -1042,7 +1057,7 @@ CallAdapter::holdThisCallToggle()
 }
 
 void
-CallAdapter::muteThisCallToggle(bool mute)
+CallAdapter::muteAudioToggle()
 {
     const auto callId = lrcInstance_->getCallIdForConversationUid(lrcInstance_->get_selectedConvUid(),
                                                                   accountId_);
@@ -1051,11 +1066,12 @@ CallAdapter::muteThisCallToggle(bool mute)
     }
     auto* callModel = lrcInstance_->getCurrentCallModel();
     if (callModel->hasCall(callId)) {
-        callModel->requestMediaChange(callId,
-                                      "audio_0",
-                                      lrcInstance_->avModel().getCurrentVideoCaptureDevice(),
-                                      lrc::api::NewCallModel::MediaRequestType::CAMERA,
-                                      mute);
+        const auto callInfo = lrcInstance_->getCurrentCallModel()->getCall(callId);
+        auto mute = false;
+        for (const auto& m : callInfo.mediaList)
+            if (m[DRing::Media::MediaAttributeKey::LABEL] == "audio_0")
+                mute = m[DRing::Media::MediaAttributeKey::MUTED] == FALSE_STR;
+        callModel->muteMedia(callId, "audio_0", mute);
     }
 }
 
@@ -1074,7 +1090,7 @@ CallAdapter::recordThisCallToggle()
 }
 
 void
-CallAdapter::videoPauseThisCallToggle(bool mute)
+CallAdapter::muteCameraToggle()
 {
     const auto callId = lrcInstance_->getCallIdForConversationUid(lrcInstance_->get_selectedConvUid(),
                                                                   accountId_);
@@ -1083,13 +1099,29 @@ CallAdapter::videoPauseThisCallToggle(bool mute)
     }
     auto* callModel = lrcInstance_->getCurrentCallModel();
     if (callModel->hasCall(callId)) {
-        callModel->requestMediaChange(callId,
-                                      "video_0",
-                                      lrcInstance_->avModel().getCurrentVideoCaptureDevice(),
-                                      lrc::api::NewCallModel::MediaRequestType::CAMERA,
-                                      mute);
-        // media label should come from qml
-        // also thi function can me emrged with "muteThisCallToggle"
+        const auto callInfo = lrcInstance_->getCurrentCallModel()->getCall(callId);
+        auto mute = false;
+        for (const auto& m : callInfo.mediaList) {
+            if (m[DRing::Media::MediaAttributeKey::SOURCE].startsWith(
+                    DRing::Media::VideoProtocolPrefix::CAMERA)
+                && m[DRing::Media::MediaAttributeKey::MEDIA_TYPE]
+                       == DRing::Media::Details::MEDIA_TYPE_VIDEO) {
+                mute = m[DRing::Media::MediaAttributeKey::MUTED] == FALSE_STR;
+            }
+        }
+
+        // Note: here we do not use mute, because for video we can have several inputs, so if we are
+        // sharing and showing the camera, we just want to remove the camera
+        // TODO Enum
+        if (mute)
+            callModel->removeMedia(callId,
+                                   DRing::Media::Details::MEDIA_TYPE_VIDEO,
+                                   DRing::Media::VideoProtocolPrefix::CAMERA,
+                                   mute);
+        else
+            callModel->addMedia(callId,
+                                lrcInstance_->avModel().getCurrentVideoCaptureDevice(),
+                                lrc::api::NewCallModel::MediaRequestType::CAMERA);
     }
 }
 
diff --git a/src/app/calladapter.h b/src/app/calladapter.h
index 09b0e45ea..c3f970067 100644
--- a/src/app/calladapter.h
+++ b/src/app/calladapter.h
@@ -71,9 +71,9 @@ public:
     Q_INVOKABLE bool isHandRaised(const QString& uri = {}) const;
     Q_INVOKABLE void raiseHand(const QString& uri, const QString& deviceId, bool state);
     Q_INVOKABLE void holdThisCallToggle();
-    Q_INVOKABLE void muteThisCallToggle(bool mute);
     Q_INVOKABLE void recordThisCallToggle();
-    Q_INVOKABLE void videoPauseThisCallToggle(bool mute);
+    Q_INVOKABLE void muteAudioToggle();
+    Q_INVOKABLE void muteCameraToggle();
     Q_INVOKABLE bool isRecordingThisCall();
     Q_INVOKABLE QVariantList getConferencesInfos() const;
     Q_INVOKABLE void muteParticipant(const QString& accountUri, const QString& deviceId, const QString& sinkId, const bool state);
diff --git a/src/app/constant/JamiStrings.qml b/src/app/constant/JamiStrings.qml
index 69e2e18b3..06050a032 100644
--- a/src/app/constant/JamiStrings.qml
+++ b/src/app/constant/JamiStrings.qml
@@ -220,8 +220,8 @@ Item {
     property string hangup: qsTr("End call")
     property string pauseCall: qsTr("Pause call")
     property string resumeCall: qsTr("Resume call")
-    property string pauseVideo: qsTr("Pause video")
-    property string resumeVideo: qsTr("Resume video")
+    property string muteCamera: qsTr("Mute camera")
+    property string unmuteCamera: qsTr("Unmute camera")
     property string addParticipant: qsTr("Add participant")
     property string addParticipants: qsTr("Add participants")
     property string details: qsTr("Details")
diff --git a/src/app/lrcinstance.cpp b/src/app/lrcinstance.cpp
index 619f506a8..ff097bbc1 100644
--- a/src/app/lrcinstance.cpp
+++ b/src/app/lrcinstance.cpp
@@ -403,11 +403,11 @@ LRCInstance::monitor(bool continuous)
 }
 
 QString
-LRCInstance::getCurrentCallId()
+LRCInstance::getCurrentCallId(bool forceCallOnly)
 {
     try {
         const auto& convInfo = getConversationFromConvUid(get_selectedConvUid());
-        auto call = getCallInfoForConversation(convInfo);
+        auto call = getCallInfoForConversation(convInfo, forceCallOnly);
         return call ? call->id : QString();
     } catch (...) {
         return QString();
diff --git a/src/app/lrcinstance.h b/src/app/lrcinstance.h
index 900f4207f..2dbaca094 100644
--- a/src/app/lrcinstance.h
+++ b/src/app/lrcinstance.h
@@ -88,11 +88,11 @@ public:
 
     const account::Info& getAccountInfo(const QString& accountId);
     const account::Info& getCurrentAccountInfo();
-    QString getCurrentCallId();
+    QString getCurrentCallId(bool forceCallOnly = false);
     QString getCallIdForConversationUid(const QString& convUid, const QString& accountId);
     const call::Info* getCallInfo(const QString& callId, const QString& accountId);
     const call::Info* getCallInfoForConversation(const conversation::Info& convInfo,
-                                                 bool forceCallOnly = {});
+                                                 bool forceCallOnly = false);
     const conversation::Info& getConversationFromConvUid(const QString& convUid,
                                                          const QString& accountId = {});
     const conversation::Info& getConversationFromPeerUri(const QString& peerUri,
diff --git a/src/app/mainview/components/CallActionBar.qml b/src/app/mainview/components/CallActionBar.qml
index abb748704..e7fc04a6a 100644
--- a/src/app/mainview/components/CallActionBar.qml
+++ b/src/app/mainview/components/CallActionBar.qml
@@ -173,7 +173,7 @@ Control {
                     muteAlertActive = true
                     muteAlertMessage = JamiStrings.participantModIsStillMuted
                 }
-                CallAdapter.muteThisCallToggle(!muteAudioAction.checked)
+                CallAdapter.muteAudioToggle()
             }
             checkable: true
             icon.source: checked ?
@@ -193,13 +193,13 @@ Control {
         },
         Action {
             id: muteVideoAction
-            onTriggered: CallAdapter.videoPauseThisCallToggle(!isVideoMuted)
+            onTriggered: CallAdapter.muteCameraToggle()
             checkable: true
             icon.source: checked ?
                              JamiResources.videocam_off_24dp_svg :
                              JamiResources.videocam_24dp_svg
             icon.color: checked ? "red" : "white"
-            text: !checked ? JamiStrings.pauseVideo : JamiStrings.resumeVideo
+            text: !checked ? JamiStrings.muteCamera : JamiStrings.unmuteCamera
             property var menuAction: videoInputMenuAction
         }
     ]
diff --git a/src/app/mainview/components/CallOverlay.qml b/src/app/mainview/components/CallOverlay.qml
index 4940a099e..2bd7002e2 100644
--- a/src/app/mainview/components/CallOverlay.qml
+++ b/src/app/mainview/components/CallOverlay.qml
@@ -45,7 +45,7 @@ Item {
     property bool isConference
     property bool isGrid
     property bool localHandRaised
-    property bool sharingActive: AvAdapter.currentRenderingDeviceType === Video.DeviceType.DISPLAY || AvAdapter.currentRenderingDeviceType === Video.DeviceType.FILE
+    property bool sharingActive: AvAdapter.isSharing()
     property string callId: ""
 
     signal chatButtonClicked
@@ -129,7 +129,6 @@ Item {
         mode: JamiFileDialog.Mode.OpenFile
 
         onAccepted: {
-            AvAdapter.muteCamera = !sharingActive && root.isVideoMuted
             AvAdapter.shareFile(jamiFileDialog.file)
         }
     }
@@ -154,7 +153,6 @@ Item {
     }
 
     function openShareScreen() {
-        AvAdapter.muteCamera = !sharingActive && root.isVideoMuted
         if (Qt.application.screens.length === 1) {
             AvAdapter.shareEntireScreen(0)
         } else {
@@ -164,7 +162,6 @@ Item {
     }
 
     function openShareWindow() {
-        AvAdapter.muteCamera = !sharingActive && root.isVideoMuted
         AvAdapter.getListWindows()
         if (AvAdapter.windowsNames.length >= 1) {
             SelectScreenWindowCreation.createSelectScreenWindowObject(appWindow)
@@ -173,7 +170,6 @@ Item {
     }
 
     function openShareScreenArea() {
-        AvAdapter.muteCamera = !sharingActive && root.isVideoMuted
         if (Qt.platform.os !== "windows") {
             AvAdapter.shareScreenArea(0, 0, 0, 0)
         } else {
diff --git a/src/app/mainview/components/CallViewContextMenu.qml b/src/app/mainview/components/CallViewContextMenu.qml
index b9555dffd..f33f43ff6 100644
--- a/src/app/mainview/components/CallViewContextMenu.qml
+++ b/src/app/mainview/components/CallViewContextMenu.qml
@@ -103,7 +103,7 @@ ContextMenuAutoLoader {
         GeneralMenuItem {
             id: stopSharing
 
-            canTrigger: sharingActive
+            canTrigger: AvAdapter.isSharing()
                         && !isSIP && !isVideoMuted
             itemName: JamiStrings.stopSharing
             iconSource: JamiResources.share_stop_black_24dp_svg
@@ -118,9 +118,6 @@ ContextMenuAutoLoader {
             itemName: JamiStrings.shareScreen
             iconSource: JamiResources.laptop_black_24dp_svg
             onClicked: {
-                if (AvAdapter.currentRenderingDeviceType !== Video.DeviceType.DISPLAY && AvAdapter.currentRenderingDeviceType !== Video.DeviceType.FILE) {
-                    AvAdapter.muteCamera = root.isVideoMuted
-                }
                 if (Qt.application.screens.length === 1) {
                     AvAdapter.shareEntireScreen(0)
                 } else {
@@ -137,9 +134,6 @@ ContextMenuAutoLoader {
             itemName: JamiStrings.shareWindow
             iconSource: JamiResources.window_black_24dp_svg
             onClicked: {
-                if (AvAdapter.currentRenderingDeviceType !== Video.DeviceType.DISPLAY && AvAdapter.currentRenderingDeviceType !== Video.DeviceType.FILE) {
-                    AvAdapter.muteCamera = root.isVideoMuted
-                }
                 AvAdapter.getListWindows()
                 if (AvAdapter.windowsNames.length >= 1) {
                     windowSelection = true
@@ -155,9 +149,6 @@ ContextMenuAutoLoader {
             itemName: JamiStrings.shareScreenArea
             iconSource: JamiResources.share_area_black_24dp_svg
             onClicked: {
-                if (AvAdapter.currentRenderingDeviceType !== Video.DeviceType.DISPLAY && AvAdapter.currentRenderingDeviceType !== Video.DeviceType.FILE) {
-                    AvAdapter.muteCamera = root.isVideoMuted
-                }
                 if (Qt.platform.os !== "windows") {
                     AvAdapter.shareScreenArea(0, 0, 0, 0)
                 } else {
diff --git a/src/app/mainview/components/OngoingCallPage.qml b/src/app/mainview/components/OngoingCallPage.qml
index 2999976d1..79e55c502 100644
--- a/src/app/mainview/components/OngoingCallPage.qml
+++ b/src/app/mainview/components/OngoingCallPage.qml
@@ -45,13 +45,6 @@ Rectangle {
     property bool isAudioOnly: false
     property var linkedWebview: null
     property string callPreviewId: ""
-    property bool sharingActive: AvAdapter.currentRenderingDeviceType === Video.DeviceType.DISPLAY
-                                 || AvAdapter.currentRenderingDeviceType === Video.DeviceType.FILE
-
-    onSharingActiveChanged: {
-        const deviceId = AvAdapter.currentRenderingDeviceId
-        previewRenderer.startWithId(deviceId, true)
-    }
 
     color: "black"
 
@@ -193,10 +186,16 @@ Rectangle {
             LocalVideo {
                 id: previewRenderer
 
-                visible: !callOverlay.isAudioOnly && participantsLayer.count == 0 && !callOverlay.isVideoMuted && !callOverlay.isPaused &&
-                         ((VideoDevices.listSize !== 0 && AvAdapter.currentRenderingDeviceType === Video.DeviceType.CAMERA) || AvAdapter.currentRenderingDeviceType !== Video.DeviceType.CAMERA )
+                visible: false
+                rendererId: ""
+
+                Connections {
+                    target: AvAdapter
 
-                rendererId: root.callPreviewId
+                    function onCurrentRenderingDeviceIdChanged() {
+                        previewRenderer.rendererId = AvAdapter.currentRenderingDeviceId
+                    }
+                }
 
                 height: width * invAspectRatio
                 width: Math.max(callPageMainRect.width / 5, JamiTheme.minimumPreviewWidth)
@@ -301,6 +300,9 @@ Rectangle {
                                              isAudioMuted, isVideoMuted,
                                              isSIP,
                                              isGrid)
+                        callOverlay.isVideoMuted = !AvAdapter.isCapturing()
+                        callOverlay.sharingActive = AvAdapter.isSharing()
+                        previewRenderer.visible = (AvAdapter.isSharing() || AvAdapter.isCapturing()) && participantsLayer.count == 0
                     }
 
                     function onShowOnHoldLabel(isPaused) {
diff --git a/src/libclient/api/call.h b/src/libclient/api/call.h
index 628c07c26..ab2e7823b 100644
--- a/src/libclient/api/call.h
+++ b/src/libclient/api/call.h
@@ -27,6 +27,7 @@
 #include <chrono>
 
 #include "typedefs.h"
+#include <media_const.h>
 
 namespace lrc {
 
@@ -139,7 +140,15 @@ struct Info
     Layout layout = Layout::GRID;
     VectorMapStringString mediaList = {};
     QSet<QString> peerRec {};
-    bool isConference = false;
+
+    bool hasMediaWithType(const QString& type, const QString& mediaType) const
+    {
+        for (const auto& m : mediaList)
+            if (m[DRing::Media::MediaAttributeKey::SOURCE].startsWith(type)
+                && m[DRing::Media::MediaAttributeKey::MEDIA_TYPE] == mediaType)
+                return true;
+        return false;
+    }
 };
 
 static inline bool
diff --git a/src/libclient/api/callparticipantsmodel.h b/src/libclient/api/callparticipantsmodel.h
index 37c8a8b9f..e397f2213 100644
--- a/src/libclient/api/callparticipantsmodel.h
+++ b/src/libclient/api/callparticipantsmodel.h
@@ -153,7 +153,10 @@ public:
     /**
      * @return the conference layout
      */
-    call::Layout getLayout() const { return hostLayout_; }
+    call::Layout getLayout() const
+    {
+        return hostLayout_;
+    }
 
     /**
      * @param index participant index
@@ -175,7 +178,7 @@ private:
     QMap<QString, ParticipantInfos> candidates_;
     // Participants ordered
     QMap<QString, ParticipantInfos> participants_;
-    QList<QString> validUris_;
+    QList<QString> validMedias_;
     int idx_ = 0;
 
     const NewCallModel& linked_;
diff --git a/src/libclient/api/newcallmodel.h b/src/libclient/api/newcallmodel.h
index 2c6edc8b3..0d105028d 100644
--- a/src/libclient/api/newcallmodel.h
+++ b/src/libclient/api/newcallmodel.h
@@ -83,21 +83,36 @@ public:
     QString createCall(const QString& uri,
                        bool isAudioOnly = false,
                        VectorMapStringString mediaList = {});
-
     /**
-     * Request a media change in a ongoing call.
-     * @param  accountId
-     * @param  callId
-     * @param  mediaLabel label of media to be changed
-     * @param source
-     * @param type
+     * Add a new media to the current list
+     * @param callId
+     * @param source        Of the media
+     * @param type          Audio/video
+     * @param mute
+     * @note For now, we only support video, as multistream only supports video in the daemon
+     */
+    void addMedia(const QString& callId,
+                  const QString& source,
+                  MediaRequestType type,
+                  bool mute = false);
+    /**
+     * Mute a media
+     * @param callId
+     * @param label        Of the media (audio_0, video_0, etc)
      * @param mute
      */
-    void requestMediaChange(const QString& callId,
-                            const QString& mediaLabel,
-                            const QString& source,
-                            MediaRequestType type,
-                            bool mute);
+    void muteMedia(const QString& callId, const QString& label, bool mute);
+    /**
+     * Remove a media from the current list
+     * @param callId
+     * @param mediaType     Media type to match (audio/video attribute value)
+     * @param type          Source prefix of the media to remove (e.g. camera, file, display)
+     * @param muteCamera    Whether the camera re-added in a 1:1 call starts muted
+     */
+    void removeMedia(const QString& callId,
+                     const QString& mediaType,
+                     const QString& type,
+                     bool muteCamera);
 
     /**
      * Get the call from its call id
@@ -188,14 +203,6 @@ public:
      */
     void togglePause(const QString& callId) const;
 
-    /**
-     * @deprecated Use requestMediaChange instead
-     * Toggle a media on a call
-     * @param callId
-     * @param media {AUDIO, VIDEO}
-     */
-    void toggleMedia(const QString& callId, const NewCallModel::Media media);
-
     /**
      * Not implemented yet
      */
diff --git a/src/libclient/callparticipantsmodel.cpp b/src/libclient/callparticipantsmodel.cpp
index b4e7cbeea..1e44db7e5 100644
--- a/src/libclient/callparticipantsmodel.cpp
+++ b/src/libclient/callparticipantsmodel.cpp
@@ -48,9 +48,9 @@ void
 CallParticipants::update(const VectorMapStringString& infos)
 {
     std::lock_guard<std::mutex> lk(updateMtx_);
-    validUris_.clear();
+    validMedias_.clear();
     filterCandidates(infos);
-    validUris_.sort();
+    validMedias_.sort();
 
     idx_ = 0;
     QList<QString> keys {};
@@ -59,16 +59,16 @@ CallParticipants::update(const VectorMapStringString& infos)
         keys = participants_.keys();
     }
     for (const auto& key : keys) {
-        auto keyIdx = validUris_.indexOf(key);
-        if (keyIdx < 0 || keyIdx >= validUris_.size())
+        auto keyIdx = validMedias_.indexOf(key);
+        if (keyIdx < 0 || keyIdx >= validMedias_.size())
             removeParticipant(idx_);
         else
             idx_++;
     }
 
     idx_ = 0;
-    for (const auto& partUri : validUris_) {
-        addParticipant(candidates_[partUri]);
+    for (const auto& partMedia : validMedias_) {
+        addParticipant(candidates_[partMedia]);
         idx_++;
     }
 
@@ -113,9 +113,9 @@ CallParticipants::addParticipant(const ParticipantInfos& participant)
     bool added {false};
     {
         std::lock_guard<std::mutex> lk(participantsMtx_);
-        auto it = participants_.find(participant.uri);
+        auto it = participants_.find(participant.sinkId);
         if (it == participants_.end()) {
-            participants_.insert(participants_.begin() + idx_, participant.uri, participant);
+            participants_.insert(participants_.begin() + idx_, participant.sinkId, participant);
             added = true;
         } else {
             if (participant == (*it))
@@ -150,10 +150,11 @@ CallParticipants::filterCandidates(const VectorMapStringString& infos)
                 }
             }
         }
+        auto media = candidate[ParticipantsInfosStrings::STREAMID];
         if (candidate[ParticipantsInfosStrings::W].toInt() != 0
             && candidate[ParticipantsInfosStrings::H].toInt() != 0) {
-            validUris_.append(peerId);
-            candidates_.insert(peerId, ParticipantInfos(candidate, callId_, peerId));
+            validMedias_.append(media);
+            candidates_.insert(media, ParticipantInfos(candidate, callId_, peerId));
         }
     }
 }
diff --git a/src/libclient/newcallmodel.cpp b/src/libclient/newcallmodel.cpp
index 97f7dc3f2..6d74bfc2a 100644
--- a/src/libclient/newcallmodel.cpp
+++ b/src/libclient/newcallmodel.cpp
@@ -352,8 +352,8 @@ NewCallModel::updateCallMediaList(const QString& callId, bool acceptVideo)
                  it++) {
                 if ((*it)[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO
                     && !acceptVideo) {
-                    (*it)[MediaAttributeKey::ENABLED] = "false";
-                    (*it)[MediaAttributeKey::MUTED] = "true";
+                    (*it)[MediaAttributeKey::ENABLED] = FALSE_STR;
+                    (*it)[MediaAttributeKey::MUTED] = TRUE_STR;
                     callInfos->second->videoMuted = !acceptVideo;
                 }
             }
@@ -368,8 +368,8 @@ NewCallModel::createCall(const QString& uri, bool isAudioOnly, VectorMapStringSt
     if (mediaList.isEmpty()) {
         MapStringString mediaAttribute = {{MediaAttributeKey::MEDIA_TYPE,
                                            MediaAttributeValue::AUDIO},
-                                          {MediaAttributeKey::ENABLED, "true"},
-                                          {MediaAttributeKey::MUTED, "false"},
+                                          {MediaAttributeKey::ENABLED, TRUE_STR},
+                                          {MediaAttributeKey::MUTED, FALSE_STR},
                                           {MediaAttributeKey::SOURCE, ""},
                                           {MediaAttributeKey::LABEL, "audio_0"}};
         mediaList.push_back(mediaAttribute);
@@ -406,64 +406,57 @@ NewCallModel::createCall(const QString& uri, bool isAudioOnly, VectorMapStringSt
 }
 
 void
-NewCallModel::requestMediaChange(const QString& callId,
-                                 const QString& mediaLabel,
-                                 const QString& uri,
-                                 MediaRequestType type,
-                                 bool mute)
+NewCallModel::muteMedia(const QString& callId, const QString& label, bool mute)
 {
-    // Main audio: audio_0
-    // Main video: video_0
-
     auto& callInfo = pimpl_->calls[callId];
     if (!callInfo)
         return;
 
-    QString sep = DRing::Media::VideoProtocolPrefix::SEPARATOR;
-    // The media label must contain either "audio" or "video" substring,
-    // otherwise the media will be considered as of un-supported type.
-    QString mediaType = mediaLabel.contains("audio")
-                            ? MediaAttributeValue::AUDIO
-                            : (mediaLabel.contains("video") ? MediaAttributeValue::VIDEO : "");
-
-    if (mediaType.isEmpty()) {
-        qCritical() << "No valide media type found in media label!";
+    auto proposedList = callInfo->mediaList;
+    for (auto& media : proposedList)
+        if (media[MediaAttributeKey::LABEL] == label)
+            media[MediaAttributeKey::MUTED] = mute ? TRUE_STR : FALSE_STR;
+    CallManager::instance().requestMediaChange(owner.id, callId, proposedList);
+}
+
+void
+NewCallModel::addMedia(const QString& callId,
+                       const QString& source,
+                       MediaRequestType type,
+                       bool mute)
+{
+    auto& callInfo = pimpl_->calls[callId];
+    if (!callInfo)
         return;
-    }
 
     QString resource {};
     QString srctype {};
-    auto proposedList = callInfo->mediaList;
-
-    int found = 0;
-
+    QString sep = DRing::Media::VideoProtocolPrefix::SEPARATOR;
     switch (type) {
     case MediaRequestType::FILESHARING: {
         // File sharing
-        resource = !uri.isEmpty() ? QString("%1%2%3")
-                                        .arg(DRing::Media::VideoProtocolPrefix::FILE)
-                                        .arg(sep)
-                                        .arg(QUrl(uri).toLocalFile())
-                                  : DRing::Media::VideoProtocolPrefix::NONE;
+        resource = !source.isEmpty() ? QString("%1%2%3")
+                                           .arg(DRing::Media::VideoProtocolPrefix::FILE)
+                                           .arg(sep)
+                                           .arg(QUrl(source).toLocalFile())
+                                     : DRing::Media::VideoProtocolPrefix::NONE;
         if (not resource.isEmpty())
             srctype = MediaAttributeValue::SRC_TYPE_FILE;
         break;
     }
     case MediaRequestType::SCREENSHARING: {
         // Screen/window sharing
-        resource = uri;
+        resource = source;
         srctype = MediaAttributeValue::SRC_TYPE_DISPLAY;
         break;
     }
     case MediaRequestType::CAMERA: {
         // Camera device
-        if (mediaLabel.contains("video")) {
-            resource = not uri.isEmpty() ? QString("%1%2%3")
-                                               .arg(DRing::Media::VideoProtocolPrefix::CAMERA)
-                                               .arg(sep)
-                                               .arg(uri)
-                                         : DRing::Media::VideoProtocolPrefix::NONE;
-        }
+        resource = not source.isEmpty() ? QString("%1%2%3")
+                                              .arg(DRing::Media::VideoProtocolPrefix::CAMERA)
+                                              .arg(sep)
+                                              .arg(source)
+                                        : DRing::Media::VideoProtocolPrefix::NONE;
         srctype = MediaAttributeValue::SRC_TYPE_CAPTURE_DEVICE;
         break;
     }
@@ -471,60 +464,102 @@ NewCallModel::requestMediaChange(const QString& callId,
         return;
     }
 
-    if (callInfo->type == call::Type::CONFERENCE) {
-        MapStringString mediaAttribute = {{MediaAttributeKey::MEDIA_TYPE, mediaType},
-                                          {MediaAttributeKey::ENABLED, "true"},
-                                          {MediaAttributeKey::MUTED, mute ? "true" : "false"},
-                                          {MediaAttributeKey::SOURCE_TYPE, srctype},
-                                          {MediaAttributeKey::SOURCE, resource},
-                                          {MediaAttributeKey::LABEL, mediaLabel}};
-        proposedList.push_back(mediaAttribute);
-    }
-
-    for (auto& item : proposedList) {
-        if (item[MediaAttributeKey::LABEL] == mediaLabel) {
-            mute = resource.isEmpty() ? item[MediaAttributeKey::MUTED] == "false" : mute;
-            item[MediaAttributeKey::ENABLED] = "true";
-            item[MediaAttributeKey::MUTED] = mute ? "true" : "false";
-            item[MediaAttributeKey::SOURCE_TYPE] = srctype;
-            // For now, only the video source can be changed by the client.
-            if (item[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO) {
-                item[MediaAttributeKey::SOURCE] = resource.isEmpty()
-                                                      ? item[MediaAttributeKey::SOURCE]
-                                                      : resource;
-            }
+    auto proposedList = callInfo->mediaList;
+    MapStringString mediaAttribute = {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::VIDEO},
+                                      {MediaAttributeKey::ENABLED, TRUE_STR},
+                                      {MediaAttributeKey::MUTED, mute ? TRUE_STR : FALSE_STR},
+                                      {MediaAttributeKey::SOURCE, resource},
+                                      {MediaAttributeKey::LABEL, "video_1"}};
+    // If we're in a 1:1 call, we only show one preview, so limit to one video (the new one)
+    auto participantsModel = pimpl_->participantsModel.find(callId);
+    auto isConf = participantsModel != pimpl_->participantsModel.end()
+                  && participantsModel->second->getParticipants().size() != 0;
+
+    auto replaced = false;
+    for (auto& media : proposedList) {
+        auto replace = media[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO;
+        // In a 1:1 we replace the first video, in a conference we replace only if it's a muted
+        // video as we show multiple previews
+        if (isConf)
+            replace &= media[MediaAttributeKey::MUTED] == TRUE_STR;
+        if (replace) {
+            mediaAttribute[MediaAttributeKey::LABEL] = media[MediaAttributeKey::LABEL];
+            media = mediaAttribute;
+            replaced = true;
             break;
         }
-        found++;
     }
-
-    if (found == proposedList.size() && mediaLabel == "video_0") {
-        mute &= !resource.isEmpty();
-        MapStringString mediaAttribute = {{MediaAttributeKey::MEDIA_TYPE,
-                                           MediaAttributeValue::VIDEO},
-                                          {MediaAttributeKey::ENABLED, "true"},
-                                          {MediaAttributeKey::MUTED, mute ? "true" : "false"},
-                                          {MediaAttributeKey::SOURCE_TYPE, srctype},
-                                          {MediaAttributeKey::SOURCE, resource},
-                                          {MediaAttributeKey::LABEL, mediaLabel}};
+    if (!replaced)
         proposedList.push_back(mediaAttribute);
-    }
+
+    if (isConf && !resource.isEmpty())
+        pimpl_->lrc.getAVModel().startPreview(resource);
 
     CallManager::instance().requestMediaChange(owner.id, callId, proposedList);
+}
 
-    // If media existed and its mute state was changed here, then we should
-    // update the mediaList because we will not receive signal
-    // mediaNegotiationStatus
-    if (found < callInfo->mediaList.size()) {
-        callInfo->mediaList = proposedList;
-        if (mediaLabel.contains("audio_0")) {
-            callInfo->audioMuted = mute;
-        } else if (mediaLabel.contains("video_0")) {
-            callInfo->videoMuted = mute;
+void
+NewCallModel::removeMedia(const QString& callId,
+                          const QString& mediaType,
+                          const QString& type,
+                          bool muteCamera)
+{
+    auto& callInfo = pimpl_->calls[callId];
+    if (!callInfo)
+        return;
+    auto isVideo = mediaType == MediaAttributeValue::VIDEO;
+    auto newIdx = 0;
+    auto replaceIdx = false, hasVideo = false;
+    VectorMapStringString proposedList;
+    QString label;
+    for (const auto& media : callInfo->mediaList) {
+        if (media[MediaAttributeKey::MEDIA_TYPE] == mediaType
+            && media[MediaAttributeKey::SOURCE].startsWith(type)) {
+            replaceIdx = true;
+            label = media[MediaAttributeKey::LABEL];
+        } else {
+            if (media[MediaAttributeKey::MEDIA_TYPE] == mediaType) {
+                auto newMedia = media;
+                if (replaceIdx) {
+                    QString idxStr = QString::number(newIdx);
+                    newMedia[MediaAttributeKey::LABEL] = isVideo ? "video_" + idxStr
+                                                                 : "audio_" + idxStr;
+                }
+                proposedList.push_back(newMedia);
+                newIdx++;
+            } else {
+                proposedList.push_back(media);
+            }
+            hasVideo |= media[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO;
         }
-        if (callInfo->status == call::Status::IN_PROGRESS)
-            Q_EMIT callInfosChanged(owner.id, callId);
     }
+
+    auto participantsModel = pimpl_->participantsModel.find(callId);
+    auto isConf = participantsModel != pimpl_->participantsModel.end()
+                  && participantsModel->second->getParticipants().size() != 0;
+    if (!isConf) {
+        // 1:1 call: in this case we only show one preview and switch between sharing and
+        // camera preview. So, if no video is left, replace it with the camera
+        if (!hasVideo)
+            addMedia(callInfo->id,
+                     pimpl_->lrc.getAVModel().getCurrentVideoCaptureDevice(),
+                     MediaRequestType::CAMERA,
+                     muteCamera);
+        return;
+    } else if (!hasVideo) {
+        // To receive the remote video, we need a muted camera
+        proposedList.push_back(
+            MapStringString {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::VIDEO},
+                             {MediaAttributeKey::ENABLED, TRUE_STR},
+                             {MediaAttributeKey::MUTED, TRUE_STR},
+                             {MediaAttributeKey::SOURCE, label},
+                             {MediaAttributeKey::LABEL, "video_0"}});
+    }
+
+    if (isVideo && !label.isEmpty())
+        pimpl_->lrc.getAVModel().stopPreview(label);
+
+    CallManager::instance().requestMediaChange(owner.id, callId, proposedList);
 }
 
 void
@@ -614,15 +649,6 @@ NewCallModel::togglePause(const QString& callId) const
     }
 }
 
-void
-NewCallModel::toggleMedia(const QString& callId, const NewCallModel::Media media)
-{
-    if (!hasCall(callId))
-        return;
-    auto mediaLabel = media == NewCallModel::Media::VIDEO ? "video_0" : "audio_0";
-    requestMediaChange(callId, mediaLabel, "", MediaRequestType::CAMERA, true);
-}
-
 void
 NewCallModel::setQuality(const QString& callId, const double quality) const
 {
@@ -965,8 +991,8 @@ NewCallModelPimpl::initCallFromDaemon()
         if (linked.owner.profileInfo.type == lrc::api::profile::Type::JAMI) {
             callInfo->peerUri = "ring:" + callInfo->peerUri;
         }
-        callInfo->videoMuted = details["VIDEO_MUTED"] == "true";
-        callInfo->audioMuted = details["AUDIO_MUTED"] == "true";
+        callInfo->videoMuted = details["VIDEO_MUTED"] == TRUE_STR;
+        callInfo->audioMuted = details["AUDIO_MUTED"] == TRUE_STR;
         callInfo->type = call::Type::DIALOG;
         VectorMapStringString infos = CallManager::instance().getConferenceInfos(linked.owner.id,
                                                                                  callId);
@@ -983,10 +1009,9 @@ NewCallModelPimpl::initCallFromDaemon()
 bool
 NewCallModelPimpl::checkMediaDeviceMuted(const MapStringString& mediaAttributes)
 {
-    return mediaAttributes[MediaAttributeKey::SOURCE_TYPE]
-               == MediaAttributeValue::SRC_TYPE_CAPTURE_DEVICE
-           && (mediaAttributes[MediaAttributeKey::ENABLED] == "false"
-               || mediaAttributes[MediaAttributeKey::MUTED] == "true");
+    return mediaAttributes[MediaAttributeKey::SOURCE].startsWith("camera:")
+           && (mediaAttributes[MediaAttributeKey::ENABLED] == FALSE_STR
+               || mediaAttributes[MediaAttributeKey::MUTED] == TRUE_STR);
 }
 
 void
@@ -1304,8 +1329,8 @@ NewCallModelPimpl::slotMediaChangeRequested(const QString& accountId,
             item[MediaAttributeKey::MUTED] = callInfo->mediaList[index][MediaAttributeKey::MUTED];
             item[MediaAttributeKey::ENABLED] = callInfo->mediaList[index][MediaAttributeKey::ENABLED];
         } else {
-            item[MediaAttributeKey::MUTED] = "true";
-            item[MediaAttributeKey::ENABLED] = "true";
+            item[MediaAttributeKey::MUTED] = TRUE_STR;
+            item[MediaAttributeKey::ENABLED] = TRUE_STR;
         }
     }
     CallManager::instance().answerMediaChangeRequest(linked.owner.id,
@@ -1408,7 +1433,7 @@ NewCallModelPimpl::slotMediaNegotiationStatus(const QString& callId,
     callInfo->videoMuted = true;
     for (const auto& item : mediaList) {
         if (item[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO) {
-            if (item[MediaAttributeKey::ENABLED] == "true") {
+            if (item[MediaAttributeKey::ENABLED] == TRUE_STR) {
                 callInfo->isAudioOnly = false;
             }
             callInfo->videoMuted = checkMediaDeviceMuted(item);
@@ -1508,13 +1533,10 @@ NewCallModelPimpl::slotOnConferenceInfosUpdated(const QString& confId,
         }
     }
 
-    Q_EMIT linked.callInfosChanged(linked.owner.id, confId);
-    Q_EMIT linked.onParticipantsChanged(confId);
-
     for (auto& info : infos) {
         if (info["uri"].isEmpty()) {
-            it->second->videoMuted = info["videoMuted"] == "true";
-            it->second->audioMuted = info["audioLocalMuted"] == "true";
+            it->second->videoMuted = info["videoMuted"] == TRUE_STR;
+            it->second->audioMuted = info["audioLocalMuted"] == TRUE_STR;
         }
     }
 
@@ -1528,6 +1550,8 @@ NewCallModelPimpl::slotOnConferenceInfosUpdated(const QString& confId,
         calls[call]->audioMuted = it->second->audioMuted;
         Q_EMIT linked.callInfosChanged(linked.owner.id, call);
     }
+    Q_EMIT linked.callInfosChanged(linked.owner.id, confId);
+    Q_EMIT linked.onParticipantsChanged(confId);
 }
 
 bool
@@ -1560,6 +1584,9 @@ NewCallModelPimpl::slotConferenceCreated(const QString& accountId, const QString
                                                                              confId);
     auto participantsPtr = std::make_shared<CallParticipants>(infos, confId, linked);
     callInfo->layout = participantsPtr->getLayout();
+    VectorMapStringString mediaList = CallManager::instance().currentMediaList(linked.owner.id,
+                                                                               confId);
+    callInfo->mediaList = mediaList;
     participantsModel[confId] = participantsPtr;
 
     calls[confId] = callInfo;
diff --git a/src/libclient/qtwrapper/callmanager_wrap.h b/src/libclient/qtwrapper/callmanager_wrap.h
index ce327a7af..d4e1a264a 100644
--- a/src/libclient/qtwrapper/callmanager_wrap.h
+++ b/src/libclient/qtwrapper/callmanager_wrap.h
@@ -242,7 +242,10 @@ public:
 
     ~CallManagerInterface() {}
 
-    bool isValid() { return true; }
+    bool isValid()
+    {
+        return true;
+    }
 
 public Q_SLOTS: // METHODS
     bool accept(const QString& accountId, const QString& callId)
@@ -306,6 +309,13 @@ public Q_SLOTS: // METHODS
         return temp;
     }
 
+    VectorMapStringString currentMediaList(const QString& accountId, const QString& callId)
+    {
+        VectorMapStringString temp = convertVecMap(
+            DRing::currentMediaList(accountId.toStdString(), callId.toStdString()));
+        return temp;
+    }
+
     VectorMapStringString getConferenceInfos(const QString& accountId, const QString& confId)
     {
         VectorMapStringString temp = convertVecMap(
@@ -402,6 +412,8 @@ public Q_SLOTS: // METHODS
         return temp;
     }
 
+    // If the peer doesn't support multiple ICE streams, keep only the last audio/video
+    // This keeps the old behaviour (if sharing both the camera and a file, the shared file is kept)
     bool requestMediaChange(const QString& accountId,
                             const QString& callId,
                             const VectorMapStringString& mediaList)
@@ -430,9 +442,15 @@ public Q_SLOTS: // METHODS
     }
     // END OF MULTISTREAM FUNCTIONS
 
-    void playDTMF(const QString& key) { DRing::playDTMF(key.toStdString()); }
+    void playDTMF(const QString& key)
+    {
+        DRing::playDTMF(key.toStdString());
+    }
 
-    void recordPlaybackSeek(double value) { DRing::recordPlaybackSeek(value); }
+    void recordPlaybackSeek(double value)
+    {
+        DRing::recordPlaybackSeek(value);
+    }
 
     bool refuse(const QString& accountId, const QString& callId)
     {
@@ -457,9 +475,15 @@ public Q_SLOTS: // METHODS
         return DRing::startRecordedFilePlayback(filepath.toStdString());
     }
 
-    void startTone(int start, int type) { DRing::startTone(start, type); }
+    void startTone(int start, int type)
+    {
+        DRing::startTone(start, type);
+    }
 
-    void stopRecordedFilePlayback() { DRing::stopRecordedFilePlayback(); }
+    void stopRecordedFilePlayback()
+    {
+        DRing::stopRecordedFilePlayback();
+    }
 
     bool toggleRecording(const QString& accountId, const QString& callId)
     {
@@ -492,9 +516,15 @@ public Q_SLOTS: // METHODS
                                      mute);
     }
 
-    void startSmartInfo(int refresh) { DRing::startSmartInfo(refresh); }
+    void startSmartInfo(int refresh)
+    {
+        DRing::startSmartInfo(refresh);
+    }
 
-    void stopSmartInfo() { DRing::stopSmartInfo(); }
+    void stopSmartInfo()
+    {
+        DRing::stopSmartInfo();
+    }
 
     void setConferenceLayout(const QString& accountId, const QString& confId, int layout)
     {
diff --git a/src/libclient/qtwrapper/conversions_wrap.hpp b/src/libclient/qtwrapper/conversions_wrap.hpp
index 74665949d..2c29d606c 100644
--- a/src/libclient/qtwrapper/conversions_wrap.hpp
+++ b/src/libclient/qtwrapper/conversions_wrap.hpp
@@ -183,9 +183,6 @@ convertStringInt(const std::map<std::string, int>& m)
     return temp;
 }
 
-constexpr static const char* TRUE_STR = "true";
-constexpr static const char* FALSE_STR = "false";
-
 static inline QString
 toQString(bool b) noexcept
 {
diff --git a/src/libclient/typedefs.h b/src/libclient/typedefs.h
index c22ceadc0..340df3370 100644
--- a/src/libclient/typedefs.h
+++ b/src/libclient/typedefs.h
@@ -41,6 +41,9 @@ typedef QMap<QString, QStringList> MapStringStringList;
 typedef QVector<QByteArray> VectorVectorByte;
 typedef uint64_t DataTransferId;
 
+constexpr static const char* TRUE_STR = "true";
+constexpr static const char* FALSE_STR = "false";
+
 // Adapted from libring DRing::DataTransferInfo
 struct DataTransferInfo
 {
@@ -172,17 +175,35 @@ public:
         return *this;
     }
 
-    bool operator!=(const T& other) const { return m_Flags != static_cast<uint>(other); }
+    bool operator!=(const T& other) const
+    {
+        return m_Flags != static_cast<uint>(other);
+    }
 
-    bool operator==(const T& other) const { return m_Flags == static_cast<uint>(other); }
+    bool operator==(const T& other) const
+    {
+        return m_Flags == static_cast<uint>(other);
+    }
 
-    bool operator==(const FlagPack<T>& other) const { return m_Flags == other.m_Flags; }
+    bool operator==(const FlagPack<T>& other) const
+    {
+        return m_Flags == other.m_Flags;
+    }
 
-    bool operator!() const { return !m_Flags; }
+    bool operator!() const
+    {
+        return !m_Flags;
+    }
 
-    operator bool() const { return m_Flags != 0; }
+    operator bool() const
+    {
+        return m_Flags != 0;
+    }
 
-    uint value() const { return m_Flags; }
+    uint value() const
+    {
+        return m_Flags;
+    }
 
 private:
     FlagPack(uint base)
-- 
GitLab