Skip to content
Snippets Groups Projects
Commit b1ec634e authored by Adrien Béraud
Browse files

SinkClient: allow child sinks to observe a parent sink with no client, cleanup

Without this patch, clients need to observe the main mixer sink to be
able to observe individual participants.

GitLab: jami-client-android#1100
Change-Id: I97c4b8d3d40049a81c94b13d9c96776c4030ab2f
parent ebb00d83
No related branches found
No related tags found
No related merge requests found
......@@ -337,69 +337,57 @@ SinkClient::SinkClient(const std::string& id, bool mixer)
}
void
SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
const std::shared_ptr<MediaFrame>& frame_p)
SinkClient::sendFrameDirect(const std::shared_ptr<jami::MediaFrame>& frame_p)
{
#ifdef DEBUG_FPS
auto currentTime = std::chrono::system_clock::now();
const std::chrono::duration<double> seconds = currentTime - lastFrameDebug_;
++frameCount_;
if (seconds.count() > 1) {
auto fps = frameCount_ / seconds.count();
// Send the framerate in smartInfo
Smartools::getInstance().setFrameRate(id_, std::to_string(fps));
frameCount_ = 0;
lastFrameDebug_ = currentTime;
}
#endif
notify(frame_p);
std::unique_lock<std::mutex> lock(mtx_);
if (target_.push and not target_.pull) {
VideoFrame outFrame;
outFrame.copyFrom(*std::static_pointer_cast<VideoFrame>(frame_p));
DRing::FrameBuffer outFrame(av_frame_alloc());
av_frame_ref(outFrame.get(), std::static_pointer_cast<VideoFrame>(frame_p)->pointer());
if (crop_.w || crop_.h) {
outFrame.pointer()->crop_top = crop_.y;
outFrame.pointer()->crop_bottom = (size_t) outFrame.height() - crop_.y - crop_.h;
outFrame.pointer()->crop_left = crop_.x;
outFrame.pointer()->crop_right = (size_t) outFrame.width() - crop_.x - crop_.w;
av_frame_apply_cropping(outFrame.pointer(), AV_FRAME_CROP_UNALIGNED);
}
if (outFrame.height() != height_ || outFrame.width() != width_) {
setFrameSize(outFrame.width(), outFrame.height());
outFrame->crop_top = crop_.y;
outFrame->crop_bottom = (size_t) outFrame->height - crop_.y - crop_.h;
outFrame->crop_left = crop_.x;
outFrame->crop_right = (size_t) outFrame->width - crop_.x - crop_.w;
av_frame_apply_cropping(outFrame.get(), AV_FRAME_CROP_UNALIGNED);
}
if (outFrame->height != height_ || outFrame->width != width_) {
setFrameSize(outFrame->width, outFrame->height);
return;
}
notify(std::static_pointer_cast<MediaFrame>(frame_p));
target_.push(outFrame.getFrame());
return;
target_.push(std::move(outFrame));
}
bool doTransfer = (target_.pull != nullptr);
#if HAVE_SHM
doTransfer |= (shm_ && doShmTransfer_);
#endif
void
SinkClient::sendFrameTransformed(AVFrame* frame)
{
    // Drop frames without valid dimensions — nothing to scale.
    if (frame->width <= 0 or frame->height <= 0)
        return;
    // Borrow the client's output buffer, scale the frame into it,
    // then hand the filled buffer back to the client.
    if (auto targetBuffer = target_.pull()) {
        scaler_->scale(frame, targetBuffer.get());
        target_.push(std::move(targetBuffer));
    }
}
if (doTransfer) {
std::shared_ptr<VideoFrame>
SinkClient::applyTransform(VideoFrame& frame_p)
{
std::shared_ptr<VideoFrame> frame = std::make_shared<VideoFrame>();
#ifdef RING_ACCEL
auto desc = av_pix_fmt_desc_get(
(AVPixelFormat)(std::static_pointer_cast<VideoFrame>(frame_p))->format());
auto desc = av_pix_fmt_desc_get((AVPixelFormat)frame_p.format());
if (desc && (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) {
try {
frame = HardwareAccel::transferToMainMemory(*std::static_pointer_cast<VideoFrame>(
frame_p),
AV_PIX_FMT_NV12);
frame = HardwareAccel::transferToMainMemory(frame_p, AV_PIX_FMT_NV12);
} catch (const std::runtime_error& e) {
JAMI_ERR("[Sink:%p] Transfert to hardware acceleration memory failed: %s",
this,
e.what());
return;
return {};
}
} else
#endif
frame->copyFrom(*std::static_pointer_cast<VideoFrame>(frame_p));
frame->copyFrom(frame_p);
int angle = frame->getOrientation();
if (angle != rotation_) {
filter_ = getTransposeFilter(angle,
FILTER_INPUT_NAME,
......@@ -411,19 +399,57 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
}
if (filter_) {
filter_->feedInput(frame->pointer(), FILTER_INPUT_NAME);
frame = std::static_pointer_cast<VideoFrame>(
std::shared_ptr<MediaFrame>(filter_->readOutput()));
frame = std::static_pointer_cast<VideoFrame>(std::shared_ptr<MediaFrame>(filter_->readOutput()));
}
notify(std::static_pointer_cast<MediaFrame>(frame));
if (crop_.w || crop_.h) {
frame->pointer()->crop_top = crop_.y;
frame->pointer()->crop_bottom = (size_t) frame->height() - crop_.y - crop_.h;
frame->pointer()->crop_left = crop_.x;
frame->pointer()->crop_right = (size_t) frame->width() - crop_.x - crop_.w;
av_frame_apply_cropping(frame->pointer(), AV_FRAME_CROP_UNALIGNED);
av_frame_apply_cropping(frame->pointer(), 0);
}
return frame;
}
void
SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
const std::shared_ptr<MediaFrame>& frame_p)
{
#ifdef DEBUG_FPS
auto currentTime = std::chrono::system_clock::now();
std::chrono::duration<double> seconds = currentTime - lastFrameDebug_;
++frameCount_;
if (seconds.count() > 1) {
auto fps = frameCount_ / seconds.count();
// Send the framerate in smartInfo
Smartools::getInstance().setFrameRate(id_, std::to_string(fps));
frameCount_ = 0;
lastFrameDebug_ = currentTime;
}
#endif
std::unique_lock<std::mutex> lock(mtx_);
bool hasObservers = getObserversCount() != 0;
bool hasDirectListener = target_.push and not target_.pull;
bool hasTransformedListener = target_.push and target_.pull;
if (hasDirectListener) {
sendFrameDirect(frame_p);
return;
}
bool doTransfer = hasTransformedListener or hasObservers;
#if HAVE_SHM
doTransfer |= (shm_ && doShmTransfer_);
#endif
if (doTransfer) {
auto frame = applyTransform(*std::static_pointer_cast<VideoFrame>(frame_p));
if (not frame)
return;
notify(std::static_pointer_cast<MediaFrame>(frame));
if (frame->height() != height_ || frame->width() != width_) {
lock.unlock();
setFrameSize(frame->width(), frame->height());
......@@ -433,16 +459,8 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
if (shm_ && doShmTransfer_)
shm_->renderFrame(*frame);
#endif
if (target_.pull) {
int width = frame->width();
int height = frame->height();
if (width > 0 && height > 0) {
if (auto buffer_ptr = target_.pull()) {
scaler_->scale(*frame, buffer_ptr.get());
target_.push(std::move(buffer_ptr));
}
}
}
if (hasTransformedListener)
sendFrameTransformed(frame->pointer());
}
}
......
......@@ -106,7 +106,16 @@ private:
std::unique_ptr<MediaFilter> filter_;
std::mutex mtx_;
void setRotation(int rotation);
void sendFrameDirect(const std::shared_ptr<jami::MediaFrame>&);
void sendFrameTransformed(AVFrame* frame);
/**
* Apply required transformations before sending frames to clients/observers:
* - Transfer the frame from gpu to main memory, if needed.
* - Rotate the frame as needed.
* - Apply cropping as needed
*/
std::shared_ptr<VideoFrame> applyTransform(VideoFrame& frame);
#ifdef DEBUG_FPS
unsigned frameCount_;
......
......@@ -42,14 +42,12 @@ VideoScaler::~VideoScaler()
void
VideoScaler::scale(const VideoFrame& input, VideoFrame& output){
scale(input, output.pointer());
scale(input.pointer(), output.pointer());
}
void
VideoScaler::scale(const VideoFrame& input, AVFrame* output_frame)
VideoScaler::scale(const AVFrame* input_frame, AVFrame* output_frame)
{
const auto input_frame = input.pointer();
ctx_ = sws_getCachedContext(ctx_,
input_frame->width,
input_frame->height,
......
......@@ -39,7 +39,7 @@ public:
VideoScaler();
~VideoScaler();
void reset();
void scale(const VideoFrame& input, AVFrame* output);
void scale(const AVFrame* input, AVFrame* output);
void scale(const VideoFrame& input, VideoFrame& output);
void scale_with_aspect(const VideoFrame& input, VideoFrame& output);
void scale_and_pad(const VideoFrame& input,
......
......@@ -111,10 +111,10 @@ public:
return false;
}
int getObserversCount()
size_t getObserversCount()
{
std::lock_guard<std::mutex> lk(mutex_);
return observers_.size();
return observers_.size() + priority_observers_.size();
}
protected:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment