Skip to content
Snippets Groups Projects
Commit b1ec634e authored by Adrien Béraud's avatar Adrien Béraud
Browse files

SinkClient: allow child sinks to observe a parent sink with no client, cleanup

Without this patch, clients need to observe the main mixer sink to be
able to observe individual participants.

GitLab: jami-client-android#1100
Change-Id: I97c4b8d3d40049a81c94b13d9c96776c4030ab2f
parent ebb00d83
No related branches found
No related tags found
No related merge requests found
...@@ -337,69 +337,57 @@ SinkClient::SinkClient(const std::string& id, bool mixer) ...@@ -337,69 +337,57 @@ SinkClient::SinkClient(const std::string& id, bool mixer)
} }
void void
SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/, SinkClient::sendFrameDirect(const std::shared_ptr<jami::MediaFrame>& frame_p)
const std::shared_ptr<MediaFrame>& frame_p)
{ {
#ifdef DEBUG_FPS notify(frame_p);
auto currentTime = std::chrono::system_clock::now();
const std::chrono::duration<double> seconds = currentTime - lastFrameDebug_;
++frameCount_;
if (seconds.count() > 1) {
auto fps = frameCount_ / seconds.count();
// Send the framerate in smartInfo
Smartools::getInstance().setFrameRate(id_, std::to_string(fps));
frameCount_ = 0;
lastFrameDebug_ = currentTime;
}
#endif
std::unique_lock<std::mutex> lock(mtx_); DRing::FrameBuffer outFrame(av_frame_alloc());
if (target_.push and not target_.pull) { av_frame_ref(outFrame.get(), std::static_pointer_cast<VideoFrame>(frame_p)->pointer());
VideoFrame outFrame;
outFrame.copyFrom(*std::static_pointer_cast<VideoFrame>(frame_p));
if (crop_.w || crop_.h) { if (crop_.w || crop_.h) {
outFrame.pointer()->crop_top = crop_.y; outFrame->crop_top = crop_.y;
outFrame.pointer()->crop_bottom = (size_t) outFrame.height() - crop_.y - crop_.h; outFrame->crop_bottom = (size_t) outFrame->height - crop_.y - crop_.h;
outFrame.pointer()->crop_left = crop_.x; outFrame->crop_left = crop_.x;
outFrame.pointer()->crop_right = (size_t) outFrame.width() - crop_.x - crop_.w; outFrame->crop_right = (size_t) outFrame->width - crop_.x - crop_.w;
av_frame_apply_cropping(outFrame.pointer(), AV_FRAME_CROP_UNALIGNED); av_frame_apply_cropping(outFrame.get(), AV_FRAME_CROP_UNALIGNED);
} }
if (outFrame.height() != height_ || outFrame.width() != width_) { if (outFrame->height != height_ || outFrame->width != width_) {
setFrameSize(outFrame.width(), outFrame.height()); setFrameSize(outFrame->width, outFrame->height);
return; return;
} }
notify(std::static_pointer_cast<MediaFrame>(frame_p)); target_.push(std::move(outFrame));
target_.push(outFrame.getFrame());
return;
} }
bool doTransfer = (target_.pull != nullptr); void
#if HAVE_SHM SinkClient::sendFrameTransformed(AVFrame* frame)
doTransfer |= (shm_ && doShmTransfer_); {
#endif if (frame->width > 0 and frame->height > 0) {
if (auto buffer_ptr = target_.pull()) {
scaler_->scale(frame, buffer_ptr.get());
target_.push(std::move(buffer_ptr));
}
}
}
if (doTransfer) { std::shared_ptr<VideoFrame>
SinkClient::applyTransform(VideoFrame& frame_p)
{
std::shared_ptr<VideoFrame> frame = std::make_shared<VideoFrame>(); std::shared_ptr<VideoFrame> frame = std::make_shared<VideoFrame>();
#ifdef RING_ACCEL #ifdef RING_ACCEL
auto desc = av_pix_fmt_desc_get( auto desc = av_pix_fmt_desc_get((AVPixelFormat)frame_p.format());
(AVPixelFormat)(std::static_pointer_cast<VideoFrame>(frame_p))->format());
if (desc && (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) { if (desc && (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) {
try { try {
frame = HardwareAccel::transferToMainMemory(*std::static_pointer_cast<VideoFrame>( frame = HardwareAccel::transferToMainMemory(frame_p, AV_PIX_FMT_NV12);
frame_p),
AV_PIX_FMT_NV12);
} catch (const std::runtime_error& e) { } catch (const std::runtime_error& e) {
JAMI_ERR("[Sink:%p] Transfert to hardware acceleration memory failed: %s", JAMI_ERR("[Sink:%p] Transfert to hardware acceleration memory failed: %s",
this, this,
e.what()); e.what());
return; return {};
} }
} else } else
#endif #endif
frame->copyFrom(*std::static_pointer_cast<VideoFrame>(frame_p)); frame->copyFrom(frame_p);
int angle = frame->getOrientation(); int angle = frame->getOrientation();
if (angle != rotation_) { if (angle != rotation_) {
filter_ = getTransposeFilter(angle, filter_ = getTransposeFilter(angle,
FILTER_INPUT_NAME, FILTER_INPUT_NAME,
...@@ -411,19 +399,57 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/, ...@@ -411,19 +399,57 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
} }
if (filter_) { if (filter_) {
filter_->feedInput(frame->pointer(), FILTER_INPUT_NAME); filter_->feedInput(frame->pointer(), FILTER_INPUT_NAME);
frame = std::static_pointer_cast<VideoFrame>( frame = std::static_pointer_cast<VideoFrame>(std::shared_ptr<MediaFrame>(filter_->readOutput()));
std::shared_ptr<MediaFrame>(filter_->readOutput()));
} }
notify(std::static_pointer_cast<MediaFrame>(frame));
if (crop_.w || crop_.h) { if (crop_.w || crop_.h) {
frame->pointer()->crop_top = crop_.y; frame->pointer()->crop_top = crop_.y;
frame->pointer()->crop_bottom = (size_t) frame->height() - crop_.y - crop_.h; frame->pointer()->crop_bottom = (size_t) frame->height() - crop_.y - crop_.h;
frame->pointer()->crop_left = crop_.x; frame->pointer()->crop_left = crop_.x;
frame->pointer()->crop_right = (size_t) frame->width() - crop_.x - crop_.w; frame->pointer()->crop_right = (size_t) frame->width() - crop_.x - crop_.w;
av_frame_apply_cropping(frame->pointer(), AV_FRAME_CROP_UNALIGNED); av_frame_apply_cropping(frame->pointer(), 0);
}
return frame;
}
void
SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
const std::shared_ptr<MediaFrame>& frame_p)
{
#ifdef DEBUG_FPS
auto currentTime = std::chrono::system_clock::now();
std::chrono::duration<double> seconds = currentTime - lastFrameDebug_;
++frameCount_;
if (seconds.count() > 1) {
auto fps = frameCount_ / seconds.count();
// Send the framerate in smartInfo
Smartools::getInstance().setFrameRate(id_, std::to_string(fps));
frameCount_ = 0;
lastFrameDebug_ = currentTime;
}
#endif
std::unique_lock<std::mutex> lock(mtx_);
bool hasObservers = getObserversCount() != 0;
bool hasDirectListener = target_.push and not target_.pull;
bool hasTransformedListener = target_.push and target_.pull;
if (hasDirectListener) {
sendFrameDirect(frame_p);
return;
} }
bool doTransfer = hasTransformedListener or hasObservers;
#if HAVE_SHM
doTransfer |= (shm_ && doShmTransfer_);
#endif
if (doTransfer) {
auto frame = applyTransform(*std::static_pointer_cast<VideoFrame>(frame_p));
if (not frame)
return;
notify(std::static_pointer_cast<MediaFrame>(frame));
if (frame->height() != height_ || frame->width() != width_) { if (frame->height() != height_ || frame->width() != width_) {
lock.unlock(); lock.unlock();
setFrameSize(frame->width(), frame->height()); setFrameSize(frame->width(), frame->height());
...@@ -433,16 +459,8 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/, ...@@ -433,16 +459,8 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
if (shm_ && doShmTransfer_) if (shm_ && doShmTransfer_)
shm_->renderFrame(*frame); shm_->renderFrame(*frame);
#endif #endif
if (target_.pull) { if (hasTransformedListener)
int width = frame->width(); sendFrameTransformed(frame->pointer());
int height = frame->height();
if (width > 0 && height > 0) {
if (auto buffer_ptr = target_.pull()) {
scaler_->scale(*frame, buffer_ptr.get());
target_.push(std::move(buffer_ptr));
}
}
}
} }
} }
......
...@@ -106,7 +106,16 @@ private: ...@@ -106,7 +106,16 @@ private:
std::unique_ptr<MediaFilter> filter_; std::unique_ptr<MediaFilter> filter_;
std::mutex mtx_; std::mutex mtx_;
void setRotation(int rotation); void sendFrameDirect(const std::shared_ptr<jami::MediaFrame>&);
void sendFrameTransformed(AVFrame* frame);
/**
* Apply required transformations before sending frames to clients/observers:
* - Transfer the frame from gpu to main memory, if needed.
* - Rotate the frame as needed.
* - Apply cropping as needed
*/
std::shared_ptr<VideoFrame> applyTransform(VideoFrame& frame);
#ifdef DEBUG_FPS #ifdef DEBUG_FPS
unsigned frameCount_; unsigned frameCount_;
......
...@@ -42,14 +42,12 @@ VideoScaler::~VideoScaler() ...@@ -42,14 +42,12 @@ VideoScaler::~VideoScaler()
void void
VideoScaler::scale(const VideoFrame& input, VideoFrame& output){ VideoScaler::scale(const VideoFrame& input, VideoFrame& output){
scale(input, output.pointer()); scale(input.pointer(), output.pointer());
} }
void void
VideoScaler::scale(const VideoFrame& input, AVFrame* output_frame) VideoScaler::scale(const AVFrame* input_frame, AVFrame* output_frame)
{ {
const auto input_frame = input.pointer();
ctx_ = sws_getCachedContext(ctx_, ctx_ = sws_getCachedContext(ctx_,
input_frame->width, input_frame->width,
input_frame->height, input_frame->height,
......
...@@ -39,7 +39,7 @@ public: ...@@ -39,7 +39,7 @@ public:
VideoScaler(); VideoScaler();
~VideoScaler(); ~VideoScaler();
void reset(); void reset();
void scale(const VideoFrame& input, AVFrame* output); void scale(const AVFrame* input, AVFrame* output);
void scale(const VideoFrame& input, VideoFrame& output); void scale(const VideoFrame& input, VideoFrame& output);
void scale_with_aspect(const VideoFrame& input, VideoFrame& output); void scale_with_aspect(const VideoFrame& input, VideoFrame& output);
void scale_and_pad(const VideoFrame& input, void scale_and_pad(const VideoFrame& input,
......
...@@ -111,10 +111,10 @@ public: ...@@ -111,10 +111,10 @@ public:
return false; return false;
} }
int getObserversCount() size_t getObserversCount()
{ {
std::lock_guard<std::mutex> lk(mutex_); std::lock_guard<std::mutex> lk(mutex_);
return observers_.size(); return observers_.size() + priority_observers_.size();
} }
protected: protected:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment