Commit d38858c1 authored by Aline Gondim Santos, committed by Sébastien Blin

filesharing: consider audio in the proposed list

When adding or removing a file sharing, we must add or remove
not only the video but also the audio media attributes
in the media change proposal.

GitLab: jami-daemon#485
Change-Id: Ifa6be8d500a43b1c2494501a4d6a6a6f009fdb62
parent 088c3674
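For context (not part of the commit): a minimal sketch of the two entries a file-sharing proposal now carries, mirroring what the new getProposed() builds in the diff below. It assumes lrc's MapStringString/VectorMapStringString typedefs and the MediaAttributeKey, MediaAttributeValue, TRUE_STR and FALSE_STR constants are in scope; the helper name and its parameters are hypothetical.

// Hypothetical helper, illustration only: the video entry was already proposed
// before this change; the audio entry is what the fix adds when shareAudio is set.
static VectorMapStringString
buildFileSharingEntries(const QString& resource, bool mute, bool shareAudio, int vid, int aid)
{
    VectorMapStringString entries;
    entries.push_back(MapStringString {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::VIDEO},
                                       {MediaAttributeKey::ENABLED, TRUE_STR},
                                       {MediaAttributeKey::MUTED, mute ? TRUE_STR : FALSE_STR},
                                       {MediaAttributeKey::SOURCE, resource},
                                       {MediaAttributeKey::LABEL, QString("video_%1").arg(vid)}});
    if (shareAudio) // the fix: the file's audio track is proposed alongside its video
        entries.push_back(MapStringString {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::AUDIO},
                                           {MediaAttributeKey::ENABLED, TRUE_STR},
                                           {MediaAttributeKey::MUTED, mute ? TRUE_STR : FALSE_STR},
                                           {MediaAttributeKey::SOURCE, resource},
                                           {MediaAttributeKey::LABEL, QString("audio_%1").arg(aid)}});
    return entries;
}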
@@ -204,12 +204,12 @@ AvAdapter::shareFile(const QString& filePath)
         &lrc::api::AVModel::fileOpened,
         this,
         [this, callId, filePath, resource](bool hasAudio, bool hasVideo) {
-            // TODO: add videos's audio while adding file sharing
-            if (hasVideo) {
+            // TODO: allow audio only sharing
+            if (hasVideo) { // only start sharing if video is available
                 lrcInstance_->avModel().pausePlayer(resource, false);
                 lrcInstance_->avModel().setAutoRestart(resource, true);
                 lrcInstance_->getCurrentCallModel()
-                    ->addMedia(callId, filePath, lrc::api::CallModel::MediaRequestType::FILESHARING);
+                    ->addMedia(callId, filePath, lrc::api::CallModel::MediaRequestType::FILESHARING, false, hasAudio);
             } else {
                 // Close media player because we are not going to start sharing
                 lrcInstance_->avModel().closeMediaPlayer(resource);
@@ -89,12 +89,29 @@ public:
      * @param source Of the media
      * @param type Audio/video
      * @param mute
-     * @note For now, we only support video, as multistream only supports video in the daemon
+     * @param shareAudio
      */
     void addMedia(const QString& callId,
                   const QString& source,
                   MediaRequestType type,
-                  bool mute = false);
+                  bool mute = false,
+                  bool shareAudio = false);
+
+    /**
+     * get list of proposed medias
+     * @param mediaList
+     * @param callId
+     * @param source Of the media
+     * @param type Audio/video
+     * @param mute
+     * @param shareAudio
+     */
+    VectorMapStringString getProposed(VectorMapStringString mediaList,
+                                      const QString& callId,
+                                      const QString& source,
+                                      MediaRequestType type,
+                                      bool mute,
+                                      bool shareAudio = false);
     /**
      * Mute a media
      * @param callId
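A hedged usage sketch of the declarations above (not from the commit): the names callModel, callId, filePath and hasAudio are placeholders for the caller's context. addMedia() keeps its previous behaviour when shareAudio is omitted, so existing call sites compile unchanged; getProposed() only builds the list and leaves requestMediaChange() to the caller.

// Illustrative only: exercising the new signatures from the header above.
void shareFileWithAudio(lrc::api::CallModel& callModel,
                        const QString& callId,
                        const QString& filePath,
                        bool hasAudio)
{
    // New fifth argument: also propose the file's audio track.
    callModel.addMedia(callId,
                       filePath,
                       lrc::api::CallModel::MediaRequestType::FILESHARING,
                       /*mute=*/false,
                       /*shareAudio=*/hasAudio);
}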
@@ -494,23 +494,28 @@ CallModel::replaceDefaultCamera(const QString& callId, const QString& deviceId)
     CallManager::instance().requestMediaChange(owner.id, callId, proposedList);
 }
 
-void
-CallModel::addMedia(const QString& callId, const QString& source, MediaRequestType type, bool mute)
+VectorMapStringString
+CallModel::getProposed(VectorMapStringString mediaList, const QString& callId, const QString& source, MediaRequestType type, bool mute, bool shareAudio)
 {
     auto& callInfo = pimpl_->calls[callId];
     if (!callInfo || source.isEmpty())
-        return;
+        return {};
 
     QString resource {};
-    auto id = 0;
-    for (const auto& media : callInfo->mediaList) {
+    auto aid = 0;
+    auto vid = 0;
+    for (const auto& media : mediaList) {
         if (media[MediaAttributeKey::SOURCE] == source)
             break;
+        if (media[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::AUDIO)
+            aid++;
         if (media[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO)
-            id++;
+            vid++;
     }
-    QString label = QString("video_%1").arg(id);
+    QString alabel = QString("audio_%1").arg(aid);
+    QString vlabel = QString("video_%1").arg(vid);
     QString sep = libjami::Media::VideoProtocolPrefix::SEPARATOR;
+    MapStringString audioMediaAttribute {};
     switch (type) {
     case MediaRequestType::FILESHARING: {
         // File sharing
@@ -519,6 +524,12 @@ CallModel::addMedia(const QString& callId, const QString& source, MediaRequestTy
                            .arg(sep)
                            .arg(QUrl(source).toLocalFile())
                        : libjami::Media::VideoProtocolPrefix::NONE;
+        if (shareAudio)
+            audioMediaAttribute = {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::AUDIO},
+                                   {MediaAttributeKey::ENABLED, TRUE_STR},
+                                   {MediaAttributeKey::MUTED, mute ? TRUE_STR : FALSE_STR},
+                                   {MediaAttributeKey::SOURCE, resource},
+                                   {MediaAttributeKey::LABEL, alabel}};
         break;
     }
     case MediaRequestType::SCREENSHARING: {
@@ -536,22 +547,22 @@ CallModel::addMedia(const QString& callId, const QString& source, MediaRequestTy
         break;
     }
     default:
-        return;
+        return {};
     }
 
-    auto proposedList = callInfo->mediaList;
-    MapStringString mediaAttribute = {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::VIDEO},
-                                      {MediaAttributeKey::ENABLED, TRUE_STR},
-                                      {MediaAttributeKey::MUTED, mute ? TRUE_STR : FALSE_STR},
-                                      {MediaAttributeKey::SOURCE, resource},
-                                      {MediaAttributeKey::LABEL, label}};
+    VectorMapStringString proposedList {};
+    MapStringString videoMediaAttribute = {{MediaAttributeKey::MEDIA_TYPE, MediaAttributeValue::VIDEO},
+                                           {MediaAttributeKey::ENABLED, TRUE_STR},
+                                           {MediaAttributeKey::MUTED, mute ? TRUE_STR : FALSE_STR},
+                                           {MediaAttributeKey::SOURCE, resource},
+                                           {MediaAttributeKey::LABEL, vlabel}};
 
     // if we're in a 1:1, we only show one preview, so, limit to 1 video (the new one)
     auto participantsModel = pimpl_->participantsModel.find(callId);
     auto isConf = participantsModel != pimpl_->participantsModel.end()
                   && participantsModel->second->getParticipants().size() != 0;
     auto replaced = false;
-    for (auto& media : proposedList) {
+    for (auto& media : mediaList) {
         auto replace = media[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO;
         // In a 1:1 we replace the first video, in a conference we replace only if it's a muted
         // video or if a new sharing is requested
@@ -565,14 +576,32 @@ CallModel::addMedia(const QString& callId, const QString& source, MediaRequestTy
                          || type == MediaRequestType::SCREENSHARING);
         }
         if (replace) {
-            mediaAttribute[MediaAttributeKey::LABEL] = media[MediaAttributeKey::LABEL];
-            media = mediaAttribute;
+            videoMediaAttribute[MediaAttributeKey::LABEL] = media[MediaAttributeKey::LABEL];
+            media = videoMediaAttribute;
             replaced = true;
-            break;
+        }
+        if (!(media[MediaAttributeKey::SOURCE].startsWith(libjami::Media::VideoProtocolPrefix::FILE)
+              && type == MediaRequestType::CAMERA)) {
+            proposedList.emplace_back(media);
         }
     }
 
     if (!replaced)
-        proposedList.push_back(mediaAttribute);
+        proposedList.push_back(videoMediaAttribute);
+    if (!audioMediaAttribute.isEmpty())
+        proposedList.emplace_back(audioMediaAttribute);
+
+    return proposedList;
+}
+
+void
+CallModel::addMedia(const QString& callId, const QString& source, MediaRequestType type, bool mute, bool shareAudio)
+{
+    auto& callInfo = pimpl_->calls[callId];
+    if (!callInfo || source.isEmpty())
+        return;
+
+    auto proposedList = getProposed(callInfo->mediaList, callId, source, type, mute, shareAudio);
     CallManager::instance().requestMediaChange(owner.id, callId, proposedList);
     callInfo->mediaList = proposedList;
@@ -600,17 +629,19 @@ CallModel::removeMedia(const QString& callId,
                 replaceIdx = true;
                 label = media[MediaAttributeKey::LABEL];
             } else {
-                if (media[MediaAttributeKey::MEDIA_TYPE] == mediaType) {
-                    auto newMedia = media;
-                    if (replaceIdx) {
-                        QString idxStr = QString::number(newIdx);
-                        newMedia[MediaAttributeKey::LABEL] = isVideo ? "video_" + idxStr
-                                                                     : "audio_" + idxStr;
+                if (!media[MediaAttributeKey::SOURCE].startsWith(type)) {
+                    if (media[MediaAttributeKey::MEDIA_TYPE] == mediaType) {
+                        auto newMedia = media;
+                        if (replaceIdx) {
+                            QString idxStr = QString::number(newIdx);
+                            newMedia[MediaAttributeKey::LABEL] = isVideo ? "video_" + idxStr
+                                                                         : "audio_" + idxStr;
+                        }
+                        proposedList.push_back(newMedia);
+                        newIdx++;
+                    } else {
+                        proposedList.push_back(media);
                     }
-                    proposedList.push_back(newMedia);
-                    newIdx++;
-                } else {
-                    proposedList.push_back(media);
                 }
             }
             hasVideo |= media[MediaAttributeKey::MEDIA_TYPE] == MediaAttributeValue::VIDEO;
         }
@@ -622,12 +653,13 @@ CallModel::removeMedia(const QString& callId,
     if (!isConf) {
         // 1:1 call, in this case we only show one preview, and switch between sharing and camera
         // preview So, if no video, replace by camera
-        if (!hasVideo)
-            addMedia(callInfo->id,
-                     pimpl_->lrc.getAVModel().getCurrentVideoCaptureDevice(),
-                     MediaRequestType::CAMERA,
-                     muteCamera);
-        return;
+        if (!hasVideo) {
+            proposedList = getProposed(proposedList,
+                                       callInfo->id,
+                                       pimpl_->lrc.getAVModel().getCurrentVideoCaptureDevice(),
+                                       MediaRequestType::CAMERA,
+                                       muteCamera);
+        }
     } else if (!hasVideo) {
         // To receive the remote video, we need a muted camera
         proposedList.push_back(MapStringString {
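Design note (not part of the commit message): removeMedia() now rebuilds the 1:1 camera fallback from its own filtered proposedList via getProposed(), rather than delegating to addMedia(), which built the proposal from the call's current media list and issued the media-change request itself. It also skips every entry whose source starts with the removed protocol prefix, which is what drops a shared file's audio entry together with its video entry. Below is a minimal sketch of that filter, assuming lrc's typedefs and constants are in scope; the function name is hypothetical.

// Illustrative predicate mirroring the new check in removeMedia():
// keep an entry only if its SOURCE does not start with the removed
// protocol prefix, so a shared file's audio and video go away together.
static bool
keptAfterRemoval(const MapStringString& media, const QString& removedTypePrefix)
{
    return !media.value(MediaAttributeKey::SOURCE).startsWith(removedTypePrefix);
}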