diff --git a/src/media/audio/audio_rtp_session.h b/src/media/audio/audio_rtp_session.h
index 7514437eabddc1ae818965b45b33db5863afc7ee..081b6c023f1cb002ad5cc3997102aab9e1b73904 100644
--- a/src/media/audio/audio_rtp_session.h
+++ b/src/media/audio/audio_rtp_session.h
@@ -51,6 +51,9 @@ public:
     void initRecorder(std::shared_ptr<MediaRecorder>& rec) override;
     void deinitRecorder(std::shared_ptr<MediaRecorder>& rec) override;
 
+    std::shared_ptr<AudioInput>& getAudioLocal() { return audioInput_; }
+    std::unique_ptr<AudioReceiveThread>& getAudioReceive() { return receiveThread_; }
+
 private:
     void startSender();
     void startReceiver();
diff --git a/src/plugin/callservicesmanager.h b/src/plugin/callservicesmanager.h
index f4929f4861873643ade66c0676c5c3984e37352b..64ec983235b0b787d53ad983a1407640c53a0d9b 100644
--- a/src/plugin/callservicesmanager.h
+++ b/src/plugin/callservicesmanager.h
@@ -136,38 +136,40 @@ public:
         auto find = mediaHandlerToggled_.find(callId);
         if (find == mediaHandlerToggled_.end())
             mediaHandlerToggled_[callId] = {};
-
+        bool applyRestart = false;
         for (auto it = callAVsubjects.begin(); it != callAVsubjects.end(); ++it) {
             if (it->first.id == callId) {
                 for (auto& mediaHandler : callMediaHandlers) {
                     if (getCallHandlerId(mediaHandler) == mediaHandlerId) {
                         if (toggle) {
-                            if (mediaHandlerToggled_[callId].find(mediaHandlerId)
-                                == mediaHandlerToggled_[callId].end())
+                            notifyAVSubject(mediaHandler, it->first, it->second);
+                            if (isAttached(mediaHandler)
+                                && mediaHandlerToggled_[callId].find(mediaHandlerId)
+                                       == mediaHandlerToggled_[callId].end())
                                 mediaHandlerToggled_[callId].insert(mediaHandlerId);
-                            listAvailableSubjects(callId, mediaHandler);
                         } else {
                             mediaHandler->detach();
                             if (mediaHandlerToggled_[callId].find(mediaHandlerId)
                                 != mediaHandlerToggled_[callId].end())
                                 mediaHandlerToggled_[callId].erase(mediaHandlerId);
                         }
-
-                        /* In the case when the mediaHandler receives a hardware format
-                         * frame and converts it to main memory, we need to restart the
-                         * sender to unlink ours encoder and decoder.
-                         *
-                         * When we deactivate a mediaHandler, we try to relink the encoder
-                         * and decoder by restarting the sender.
-                         */
-                        Manager::instance()
-                            .callFactory.getCall<SIPCall>(callId)
-                            ->getVideoRtp()
-                            .restartSender();
+                        if (it->first.type == StreamType::video && isVideoType(mediaHandler))
+                            applyRestart = true;
+                        break;
                     }
                 }
             }
         }
+
+        /* In the case when the mediaHandler receives a hardware format
+         * frame and converts it to main memory, we need to restart the
+         * sender to unlink our encoder and decoder.
+         *
+         * When we deactivate a mediaHandler, we try to relink the encoder
+         * and decoder by restarting the sender.
+         */
+        if (applyRestart)
+            Manager::instance().callFactory.getCall<SIPCall>(callId)->getVideoRtp().restartSender();
     }
 
     /**
@@ -185,6 +187,36 @@ public:
         return {};
     }
 
+    bool isVideoType(const CallMediaHandlerPtr& mediaHandler)
+    {
+        const auto& details = mediaHandler->getCallMediaHandlerDetails();
+        const auto& it = details.find("dataType");
+        if (it != details.end()) {
+            JAMI_INFO() << "type: ";
+            bool status;
+            std::istringstream(it->second) >> status;
+            JAMI_INFO() << status;
+            return status;
+        }
+        JAMI_INFO() << "dataType not found";
+        return true;
+    }
+
+    bool isAttached(const CallMediaHandlerPtr& mediaHandler)
+    {
+        const auto& details = mediaHandler->getCallMediaHandlerDetails();
+        const auto& it = details.find("attached");
+        if (it != details.end()) {
+            JAMI_INFO() << "status: ";
+            bool status;
+            std::istringstream(it->second) >> status;
+            JAMI_INFO() << status;
+            return status;
+        }
+        JAMI_INFO() << "attached not found";
+        return true;
+    }
+
     std::map<std::string, std::vector<std::string>> getCallMediaHandlerStatus(
         const std::string& callId)
     {
@@ -220,9 +252,8 @@ private:
                          const StreamData& data,
                          AVSubjectSPtr& subject)
     {
-        if (auto soSubject = subject.lock()) {
+        if (auto soSubject = subject.lock())
             callMediaHandlerPtr->notifyAVFrameSubject(data, soSubject);
-        }
     }
 
     /**
@@ -258,7 +289,7 @@ private:
     /**
      * @brief callMediaHandlers
      * Components that a plugin can register through registerCallMediaHandler service
-     * These objects can then be notified with notify notifyAVFrameSubject
+     * These objects can then be notified with notifySubject
      * whenever there is a new CallAVSubject like a video receive
      */
    std::list<CallMediaHandlerPtr> callMediaHandlers;
diff --git a/src/plugin/mediahandler.h b/src/plugin/mediahandler.h
index 422a7991348f13d31512194dfe49ab427ba85d59..7a25457da3f8e03981ecf6663b05b702a5b0b6b2 100644
--- a/src/plugin/mediahandler.h
+++ b/src/plugin/mediahandler.h
@@ -40,8 +40,8 @@ public:
      * The id is the path of the plugin that created this MediaHandler
      * @return
      */
-    std::string id() const { return id_;}
-    virtual void setId(const std::string& id) final {id_ = id;}
+    std::string id() const { return id_; }
+    virtual void setId(const std::string& id) final { id_ = id; }
 
 private:
     std::string id_;
diff --git a/src/plugin/streamdata.h b/src/plugin/streamdata.h
index 1f067725cbf3cb2d2552e51bf4f64d3003ba982c..522a67b3ee23d83294becf71a0da062022761ab8 100644
--- a/src/plugin/streamdata.h
+++ b/src/plugin/streamdata.h
@@ -29,7 +29,7 @@ struct StreamData
     , source {std::move(s)}
     {}
     const std::string id;
-    const bool direction;
+    const bool direction; // 0 when local; 1 when received
     const StreamType type;
     const std::string source;
 };
diff --git a/src/sip/sipcall.cpp b/src/sip/sipcall.cpp
index e06b506eb53f3894f55439e4e57b9f7326ffcddc..4213d682a41bcb88bdfc2143f477a3b224b87987 100644
--- a/src/sip/sipcall.cpp
+++ b/src/sip/sipcall.cpp
@@ -130,38 +130,59 @@ SIPCall::getSIPAccount() const
 void
 SIPCall::createCallAVStreams()
 {
+    /**
+     * Map: maps the AudioFrame to an AVFrame
+     **/
+    auto audioMap = [](const std::shared_ptr<jami::MediaFrame> m) -> AVFrame* {
+        return std::static_pointer_cast<AudioFrame>(m)->pointer();
+    };
+
+    // Preview
+    if (auto& localAudio = avformatrtp_->getAudioLocal()) {
+        auto previewSubject = std::make_shared<MediaStreamSubject>(audioMap);
+        StreamData microStreamData {getCallId(), 0, StreamType::audio, getPeerNumber()};
+        createCallAVStream(microStreamData, *localAudio, previewSubject);
+    }
+
+    // Receive
+    if (auto& audioReceive = avformatrtp_->getAudioReceive()) {
+        auto receiveSubject = std::make_shared<MediaStreamSubject>(audioMap);
+        StreamData phoneStreamData {getCallId(), 1, StreamType::audio, getPeerNumber()};
+        createCallAVStream(phoneStreamData, (AVMediaStream&) *audioReceive, receiveSubject);
+    }
+#ifdef ENABLE_VIDEO
     if (hasVideo()) {
         /**
          * Map: maps the VideoFrame to an AVFrame
          **/
-        auto map = [](const std::shared_ptr<jami::MediaFrame> m) -> AVFrame* {
+        auto videoMap = [](const std::shared_ptr<jami::MediaFrame> m) -> AVFrame* {
             return std::static_pointer_cast<VideoFrame>(m)->pointer();
         };
 
         // Preview
         if (auto& videoPreview = videortp_->getVideoLocal()) {
-            auto previewSubject = std::make_shared<MediaStreamSubject>(map);
+            auto previewSubject = std::make_shared<MediaStreamSubject>(videoMap);
             StreamData previewStreamData {getCallId(), 0, StreamType::video, getPeerNumber()};
             createCallAVStream(previewStreamData, *videoPreview, previewSubject);
         }
 
         // Receive
-        auto& videoReceive = videortp_->getVideoReceive();
-
-        if (videoReceive) {
-            auto receiveSubject = std::make_shared<MediaStreamSubject>(map);
+        if (auto& videoReceive = videortp_->getVideoReceive()) {
+            auto receiveSubject = std::make_shared<MediaStreamSubject>(videoMap);
             StreamData receiveStreamData {getCallId(), 1, StreamType::video, getPeerNumber()};
             createCallAVStream(receiveStreamData, *videoReceive, receiveSubject);
         }
     }
+#endif
 }
 
 void
 SIPCall::createCallAVStream(const StreamData& StreamData,
-                            MediaStream& streamSource,
+                            AVMediaStream& streamSource,
                             const std::shared_ptr<MediaStreamSubject>& mediaStreamSubject)
 {
-    const std::string AVStreamId = StreamData.id + std::to_string(StreamData.direction);
+    const std::string AVStreamId = StreamData.id + std::to_string(static_cast<int>(StreamData.type))
+                                   + std::to_string(StreamData.direction);
     std::lock_guard<std::mutex> lk(avStreamsMtx_);
     auto it = callAVStreams.find(AVStreamId);
     if (it != callAVStreams.end())
@@ -1281,8 +1302,10 @@ SIPCall::stopAllMedia()
 #ifdef ENABLE_PLUGIN
     {
         std::lock_guard<std::mutex> lk(avStreamsMtx_);
-        callAVStreams.erase(getCallId() + "0");
-        callAVStreams.erase(getCallId() + "1");
+        callAVStreams.erase(getCallId() + "00"); // audio out
+        callAVStreams.erase(getCallId() + "01"); // audio in
+        callAVStreams.erase(getCallId() + "10"); // video out
+        callAVStreams.erase(getCallId() + "11"); // video in
     }
     jami::Manager::instance().getJamiPluginManager().getCallServicesManager().clearAVSubject(
         getCallId());
diff --git a/src/sip/sipcall.h b/src/sip/sipcall.h
index 58cab3da4a95f302a8e2c84eef75b45070dbec7a..5db4f8224209d3094c47cc92dd4243a8fabac8c9 100644
--- a/src/sip/sipcall.h
+++ b/src/sip/sipcall.h
@@ -264,7 +264,7 @@ private:
     /**
      * Call Streams and some typedefs
      */
-    using MediaStream = Observable<std::shared_ptr<MediaFrame>>;
+    using AVMediaStream = Observable<std::shared_ptr<MediaFrame>>;
     using MediaStreamSubject = PublishMapSubject<std::shared_ptr<MediaFrame>, AVFrame*>;
 
 #ifdef ENABLE_PLUGIN
@@ -276,7 +276,7 @@ private:
      * @param mediaStreamSubject
      */
     void createCallAVStream(const StreamData& StreamData,
-                            MediaStream& streamSource,
+                            AVMediaStream& streamSource,
                             const std::shared_ptr<MediaStreamSubject>& mediaStreamSubject);
     /**
     * @brief createCallAVStreams
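
Reviewer note: isVideoType() and isAttached() read the new handler details with a plain `std::istringstream >> bool`, which, without std::boolalpha, only understands "0" and "1". Below is a minimal standalone sketch of that contract, assuming getCallMediaHandlerDetails() keeps returning a string-to-string map; the HandlerDetails alias and readBoolDetail() helper are illustrative only and are not part of this patch.

```cpp
#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Hypothetical stand-in for whatever getCallMediaHandlerDetails() returns;
// the diff only shows that it supports find() and holds string values.
using HandlerDetails = std::map<std::string, std::string>;

// Mirrors the parsing in isVideoType()/isAttached(): without std::boolalpha,
// istringstream >> bool only parses "0"/"1" (strings like "true" fail the
// extraction). A missing key falls back to true, like the manager does.
static bool readBoolDetail(const HandlerDetails& details, const std::string& key)
{
    const auto it = details.find(key);
    if (it == details.end())
        return true; // same fallback as CallServicesManager
    bool value = true;
    std::istringstream(it->second) >> value;
    return value;
}

int main()
{
    // A plugin advertising a video handler that starts attached.
    HandlerDetails details {{"dataType", "1"}, {"attached", "1"}};

    std::cout << "video handler: " << readBoolDetail(details, "dataType") << '\n';
    std::cout << "attached:      " << readBoolDetail(details, "attached") << '\n';
}
```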
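A second note on the stream ids: createCallAVStream() now builds them as call id + stream type + direction, which is what the four erase() calls in SIPCall::stopAllMedia() ("00", "01", "10", "11") rely on. A small sketch of that naming scheme follows, assuming StreamType enumerates audio = 0 and video = 1; makeAVStreamId() is a hypothetical helper used only for illustration.

```cpp
#include <iostream>
#include <string>

// Illustrative stand-in for the enum declared in src/plugin/streamdata.h;
// the "00".."11" suffixes in stopAllMedia() assume audio = 0, video = 1.
enum class StreamType { audio = 0, video = 1 };

// Concatenates the id the same way the patched createCallAVStream() does:
// call id, then stream type, then direction (0 when local, 1 when received).
static std::string makeAVStreamId(const std::string& callId, StreamType type, bool direction)
{
    return callId + std::to_string(static_cast<int>(type)) + std::to_string(direction);
}

int main()
{
    const std::string callId = "7a9f";
    std::cout << makeAVStreamId(callId, StreamType::audio, 0) << '\n'; // 7a9f00: audio out
    std::cout << makeAVStreamId(callId, StreamType::audio, 1) << '\n'; // 7a9f01: audio in
    std::cout << makeAVStreamId(callId, StreamType::video, 0) << '\n'; // 7a9f10: video out
    std::cout << makeAVStreamId(callId, StreamType::video, 1) << '\n'; // 7a9f11: video in
}
```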