Commit d5bfc81e authored by Adrien Béraud

video_input/mobile: remove deprecated APIs, don't use loop

Allows switchInput on the same VideoInput instance on Android

Change-Id: I421a7d6e3f7b87d37a3108483cd280dc03c113cf
parent 8c7398e9
@@ -408,8 +408,6 @@ void applySettings(const std::string& name, const std::map<std::string, std::str
 void addVideoDevice(const std::string &node);
 void removeVideoDevice(const std::string &node);
 void setDeviceOrientation(const std::string& name, int angle);
-uint8_t* obtainFrame(int length);
-void releaseFrame(uint8_t* frame);
 void registerSinkTarget(const std::string& sinkId, const DRing::SinkTarget& target);
 std::string startLocalRecorder(const bool& audioOnly, const std::string& filepath);
 void stopLocalRecorder(const std::string& filepath);
@@ -588,22 +588,6 @@ removeVideoDevice(const std::string &node)
 {
     jami::Manager::instance().getVideoManager().videoDeviceMonitor.removeDevice(node);
 }
-
-void*
-obtainFrame(int length)
-{
-    if (auto input = jami::Manager::instance().getVideoManager().videoInput.lock())
-        return (*input).obtainFrame(length);
-    return nullptr;
-}
-
-void
-releaseFrame(void* frame)
-{
-    if (auto input = jami::Manager::instance().getVideoManager().videoInput.lock())
-        (*input).releaseFrame(frame);
-}
 #endif
 } // namespace DRing
@@ -188,9 +188,6 @@ DRING_PUBLIC void stopLocalRecorder(const std::string& filepath);
 #if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
 DRING_PUBLIC void addVideoDevice(const std::string &node, const std::vector<std::map<std::string, std::string>>* devInfo=nullptr);
 DRING_PUBLIC void removeVideoDevice(const std::string &node);
-DRING_PUBLIC void* obtainFrame(int length);
-DRING_PUBLIC void releaseFrame(void* frame);
 DRING_PUBLIC VideoFrame* getNewFrame();
 DRING_PUBLIC void publishFrame();
 #endif
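After this hunk the public interface keeps getNewFrame()/publishFrame() but drops the raw-buffer pair, so a mobile client is expected to react to the StartCapture/StopCapture signals emitted by VideoInput and push frames itself. A minimal client-side sketch of that flow; the include paths, the copyPixels() helper and the exact callback signatures are assumptions, not part of this diff:

#include <map>
#include <memory>
#include <string>

#include <dring/dring.h>                  // DRing::registerSignalHandlers, exportable_callback
#include <dring/videomanager_interface.h> // DRing::VideoSignal, getNewFrame, publishFrame

// Hypothetical, platform-specific: writes one captured image into the daemon-owned frame.
void copyPixels(DRing::VideoFrame* frame);

// Called from the platform camera's frame callback for every captured image.
void onImageCaptured()
{
    if (auto* frame = DRing::getNewFrame()) { // frame allocated and owned by the daemon
        copyPixels(frame);
        DRing::publishFrame();                // hand it back to VideoInput
    }
}

void registerCaptureHandlers()
{
    std::map<std::string, std::shared_ptr<DRing::CallbackWrapperBase>> handlers;

    handlers.insert(DRing::exportable_callback<DRing::VideoSignal::StartCapture>(
        [](const std::string& device) {
            // start the platform camera `device`; its frame callback calls onImageCaptured()
        }));
    handlers.insert(DRing::exportable_callback<DRing::VideoSignal::StopCapture>(
        []() { /* stop the platform camera */ }));

    DRing::registerSignalHandlers(handlers);
}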
@@ -56,94 +56,69 @@ static constexpr unsigned default_grab_height = 480;
 VideoInput::VideoInput()
     : VideoGenerator::VideoGenerator()
+#if !VIDEO_CLIENT_INPUT
     , sink_ {Manager::instance().createSinkClient("local")}
     , loop_(std::bind(&VideoInput::setup, this),
             std::bind(&VideoInput::process, this),
             std::bind(&VideoInput::cleanup, this))
-#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
-    , mutex_(), frame_cv_(), buffers_()
 #endif
 {}

 VideoInput::~VideoInput()
 {
-#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
-    /* we need to stop the loop and notify the condition variable
-     * to unblock the process loop */
-    loop_.stop();
-    frame_cv_.notify_one();
-#endif
+#if VIDEO_CLIENT_INPUT
+    emitSignal<DRing::VideoSignal::StopCapture>();
+    capturing_ = false;
+#else
     loop_.join();
+#endif
 }

-#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
-bool VideoInput::waitForBufferFull()
+void
+VideoInput::startLoop()
 {
-    for(auto& buffer : buffers_) {
-        if (buffer.status == BUFFER_FULL)
-            return true;
-    }
-
-    /* If the loop is stopped, returned true so we can quit the process loop */
-    return !isCapturing();
+#if VIDEO_CLIENT_INPUT
+    switchDevice();
+#else
+    if (!loop_.isRunning())
+        loop_.start();
+#endif
 }

-void VideoInput::process()
+#if VIDEO_CLIENT_INPUT
+void
+VideoInput::switchDevice()
 {
-    foundDecOpts(decOpts_);
-
     if (switchPending_.exchange(false)) {
         JAMI_DBG("Switching input to '%s'", decOpts_.input.c_str());
         if (decOpts_.input.empty()) {
-            loop_.stop();
+            capturing_ = false;
             return;
         }
         emitSignal<DRing::VideoSignal::StopCapture>();
         emitSignal<DRing::VideoSignal::StartCapture>(decOpts_.input);
+        capturing_ = true;
     }
-
-    std::unique_lock<std::mutex> lck(mutex_);
-    frame_cv_.wait(lck, [this] { return waitForBufferFull(); });
-
-    std::weak_ptr<VideoInput> wthis;
-    // shared_from_this throws in destructor
-    // assumes C++17
-    try {
-        wthis = shared_from_this();
-    } catch (...) {
-        return;
-    }
-
-    if (decOpts_.orientation != rotation_) {
-        setRotation(decOpts_.orientation);
-        rotation_ = decOpts_.orientation;
-    }
-
-    for (auto& buffer : buffers_) {
-        if (buffer.status == BUFFER_FULL && buffer.index == publish_index_) {
-            auto& frame = getNewFrame();
-            AVPixelFormat format = getPixelFormat();
-
-            if (auto displayMatrix = displayMatrix_)
-                av_frame_new_side_data_from_buf(frame.pointer(), AV_FRAME_DATA_DISPLAYMATRIX, av_buffer_ref(displayMatrix.get()));
-            buffer.status = BUFFER_PUBLISHED;
-            frame.setFromMemory((uint8_t*)buffer.data, format, decOpts_.width, decOpts_.height,
-                [wthis](uint8_t* ptr) {
-                    if (auto sthis = wthis.lock())
-                        sthis->releaseBufferCb(ptr);
-                    else
-                        std::free(ptr);
-                });
-            publish_index_++;
-            lck.unlock();
-            publishFrame();
-            break;
-        }
-    }
+}
+
+int VideoInput::getWidth() const
+{ return decOpts_.width; }
+
+int VideoInput::getHeight() const
+{ return decOpts_.height; }
+
+AVPixelFormat VideoInput::getPixelFormat() const
+{
+    int format;
+    std::stringstream ss;
+    ss << decOpts_.format;
+    ss >> format;
+    return (AVPixelFormat)format;
 }
+#else

 void
 VideoInput::setRotation(int angle)
 {
@@ -157,26 +132,20 @@ VideoInput::setRotation(int angle)
     }
 }

-void VideoInput::cleanup()
+bool VideoInput::setup()
 {
-    emitSignal<DRing::VideoSignal::StopCapture>();
-
-    if (detach(sink_.get()))
-        sink_->stop();
-
-    std::lock_guard<std::mutex> lck(mutex_);
-    for (auto& buffer : buffers_) {
-        if (buffer.status == BUFFER_AVAILABLE ||
-            buffer.status == BUFFER_FULL) {
-            freeOneBuffer(buffer);
-        } else if (buffer.status != BUFFER_NOT_ALLOCATED) {
-            JAMI_ERR("Failed to free buffer [%p]", buffer.data);
-        }
-    }
-    setRotation(0);
+    if (not attach(sink_.get())) {
+        JAMI_ERR("attach sink failed");
+        return false;
+    }
+    if (!sink_->start())
+        JAMI_ERR("start sink failed");
+    JAMI_DBG("VideoInput ready to capture");
+    return true;
 }
-#else

 void
 VideoInput::process()
@@ -199,35 +168,8 @@ VideoInput::cleanup()
     JAMI_DBG("VideoInput closed");
 }
-#endif
-
-bool VideoInput::setup()
-{
-    if (not attach(sink_.get())) {
-        JAMI_ERR("attach sink failed");
-        return false;
-    }
-    if (!sink_->start())
-        JAMI_ERR("start sink failed");
-    JAMI_DBG("VideoInput ready to capture");
-    return true;
-}
-
-void VideoInput::clearOptions()
-{
-    decOpts_ = {};
-    emulateRate_ = false;
-}

 bool
-VideoInput::isCapturing() const noexcept
-{
-    return loop_.isRunning();
-}
-
-bool VideoInput::captureFrame()
+VideoInput::captureFrame()
 {
     // Return true if capture could continue, false if must be stop
     if (not decoder_)
@@ -245,92 +187,6 @@ bool VideoInput::captureFrame()
     }
 }

-#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
-int VideoInput::allocateOneBuffer(struct VideoFrameBuffer& b, int length)
-{
-    b.data = std::malloc(length);
-    if (b.data) {
-        b.status = BUFFER_AVAILABLE;
-        b.length = length;
-        JAMI_DBG("Allocated buffer [%p]", b.data);
-        return 0;
-    }
-
-    JAMI_DBG("Failed to allocate memory for one buffer");
-    return -ENOMEM;
-}
-
-void VideoInput::freeOneBuffer(struct VideoFrameBuffer& b)
-{
-    JAMI_DBG("Free buffer [%p]", b.data);
-    std::free(b.data);
-    b.data = nullptr;
-    b.length = 0;
-    b.status = BUFFER_NOT_ALLOCATED;
-}
-
-void VideoInput::releaseBufferCb(uint8_t* ptr)
-{
-    std::lock_guard<std::mutex> lck(mutex_);
-
-    for(auto &buffer : buffers_) {
-        if (buffer.data == ptr) {
-            buffer.status = BUFFER_AVAILABLE;
-            if (!isCapturing())
-                freeOneBuffer(buffer);
-            break;
-        }
-    }
-}
-
-void*
-VideoInput::obtainFrame(int length)
-{
-    std::lock_guard<std::mutex> lck(mutex_);
-
-    /* allocate buffers. This is done there because it's only when the Android
-     * application requests a buffer that we know its size
-     */
-    for(auto& buffer : buffers_) {
-        if (buffer.status == BUFFER_NOT_ALLOCATED) {
-            allocateOneBuffer(buffer, length);
-        }
-    }
-
-    /* search for an available frame */
-    for(auto& buffer : buffers_) {
-        if (buffer.length == static_cast<size_t>(length) && buffer.status == BUFFER_AVAILABLE) {
-            buffer.status = BUFFER_CAPTURING;
-            return buffer.data;
-        }
-    }
-
-    JAMI_WARN("No buffer found");
-    return nullptr;
-}
-
-void
-VideoInput::releaseFrame(void *ptr)
-{
-    std::lock_guard<std::mutex> lck(mutex_);
-
-    for(auto& buffer : buffers_) {
-        if (buffer.data == ptr) {
-            if (buffer.status != BUFFER_CAPTURING)
-                JAMI_ERR("Released a buffer with status %d, expected %d",
-                        buffer.status, BUFFER_CAPTURING);
-            if (isCapturing()) {
-                buffer.status = BUFFER_FULL;
-                buffer.index = capture_index_++;
-                frame_cv_.notify_one();
-            } else {
-                freeOneBuffer(buffer);
-            }
-            break;
-        }
-    }
-}
-#endif
-
 void
 VideoInput::createDecoder()
 {
@@ -400,6 +256,33 @@ VideoInput::deleteDecoder()
     decoder_.reset();
 }

+int VideoInput::getWidth() const
+{ return decoder_->getWidth(); }
+
+int VideoInput::getHeight() const
+{ return decoder_->getHeight(); }
+
+AVPixelFormat VideoInput::getPixelFormat() const
+{ return decoder_->getPixelFormat(); }
+#endif
+
+void VideoInput::clearOptions()
+{
+    decOpts_ = {};
+    emulateRate_ = false;
+}
+
+bool
+VideoInput::isCapturing() const noexcept
+{
+#if VIDEO_CLIENT_INPUT
+    return capturing_;
+#else
+    return loop_.isRunning();
+#endif
+}
+
 bool
 VideoInput::initCamera(const std::string& device)
 {
@@ -540,7 +423,7 @@ VideoInput::switchInput(const std::string& resource)
     JAMI_DBG("MRL: '%s'", resource.c_str());

-    if (switchPending_) {
+    if (switchPending_.exchange(true)) {
         JAMI_ERR("Video switch already requested");
         return {};
     }
@@ -554,10 +437,8 @@ VideoInput::switchInput(const std::string& resource)
     // Switch off video input?
     if (resource.empty()) {
         clearOptions();
-        switchPending_ = true;
-        if (!loop_.isRunning())
-            loop_.start();
         futureDecOpts_ = foundDecOpts_.get_future();
+        startLoop();
         return futureDecOpts_;
     }
@@ -596,49 +477,21 @@ VideoInput::switchInput(const std::string& resource)
     if (ready) {
         foundDecOpts(decOpts_);
     }
-    switchPending_ = true;
-    if (!loop_.isRunning())
-        loop_.start();
     futureDecOpts_ = foundDecOpts_.get_future().share();
+    startLoop();
     return futureDecOpts_;
 }

-#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
-int VideoInput::getWidth() const
-{ return decOpts_.width; }
-
-int VideoInput::getHeight() const
-{ return decOpts_.height; }
-
-AVPixelFormat VideoInput::getPixelFormat() const
-{
-    int format;
-    std::stringstream ss;
-    ss << decOpts_.format;
-    ss >> format;
-    return (AVPixelFormat)format;
-}
-#else
-int VideoInput::getWidth() const
-{ return decoder_->getWidth(); }
-
-int VideoInput::getHeight() const
-{ return decoder_->getHeight(); }
-
-AVPixelFormat VideoInput::getPixelFormat() const
-{ return decoder_->getPixelFormat(); }
-#endif
-
 DeviceParams VideoInput::getParams() const
 { return decOpts_; }

 MediaStream
 VideoInput::getInfo() const
 {
+#if !VIDEO_CLIENT_INPUT
     if (decoder_)
         return decoder_->getStream("v:local");
+#endif
     auto opts = futureDecOpts_.get();
     rational<int> fr(opts.framerate.numerator(), opts.framerate.denominator());
     return MediaStream("v:local", av_get_pix_fmt(opts.pixel_format.c_str()),
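The switchInput() hunks above are what deliver the behaviour described in the commit message: switchPending_ is now claimed atomically with exchange(true) and capture is (re)started through startLoop(), so the same VideoInput instance can be switched repeatedly. A usage sketch; the camera:// MRLs are illustrative assumptions, the real resource strings come from the platform layer that calls switchInput():

// Sketch: reusing one VideoInput for successive devices, as the commit allows.
auto input = std::make_shared<jami::video::VideoInput>();

auto params1 = input->switchInput("camera://1"); // e.g. front camera (assumed MRL)
params1.wait();                                  // DeviceParams available once capture starts

auto params2 = input->switchInput("camera://0"); // switch again on the same instance
auto p = params2.get();                          // no new VideoInput object needed

input->switchInput("");                          // empty resource switches video input off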
@@ -20,8 +20,7 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
  */

-#ifndef __VIDEO_INPUT_H__
-#define __VIDEO_INPUT_H__
+#pragma once

 #include "noncopyable.h"
 #include "threadloop.h"
@@ -49,23 +48,11 @@ namespace jami { namespace video {

 class SinkClient;

-enum VideoFrameStatus {
-    BUFFER_NOT_ALLOCATED,
-    BUFFER_AVAILABLE, /* owned by us */
-    BUFFER_CAPTURING, /* owned by Android Java Application */
-    BUFFER_FULL, /* owned by us again */
-    BUFFER_PUBLISHED, /* owned by libav */
-};
-
-struct VideoFrameBuffer {
-    void *data;
-    size_t length;
-    enum VideoFrameStatus status;
-    int index;
-    VideoFrameBuffer() : data(nullptr), length(0),
-                         status(BUFFER_NOT_ALLOCATED), index(0) {}
-};
+#if (defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS))
+#define VIDEO_CLIENT_INPUT 1
+#else
+#define VIDEO_CLIENT_INPUT 0
+#endif

 class VideoInput : public VideoGenerator, public std::enable_shared_from_this<VideoInput>
 {
@@ -84,7 +71,7 @@ public:
     MediaStream getInfo() const;

     std::shared_future<DeviceParams> switchInput(const std::string& resource);
-#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
+#if VIDEO_CLIENT_INPUT
     /*
      * these functions are used to pass buffer from/to the daemon
      * on the Android and UWP builds
@@ -97,30 +84,18 @@ private:
     NON_COPYABLE(VideoInput);

     std::string currentResource_;
-    std::unique_ptr<MediaDecoder> decoder_;
-    std::shared_ptr<SinkClient> sink_;

     std::atomic<bool> switchPending_ = {false};
     DeviceParams decOpts_;
     std::promise<DeviceParams> foundDecOpts_;
     std::shared_future<DeviceParams> futureDecOpts_;
+    bool emulateRate_ = false;

     std::atomic_bool decOptsFound_ {false};
     void foundDecOpts(const DeviceParams& params);

-    bool emulateRate_ = false;
-    ThreadLoop loop_;
-
     void clearOptions();
-    void createDecoder();
-    void deleteDecoder();
-
-    int rotation_ {0};
-    std::shared_ptr<AVBufferRef> displayMatrix_;
-    void setRotation(int angle);

     // true if decOpts_ is ready to use, false if using promise/future
     bool initCamera(const std::string& device);
     bool initX11(std::string display);
@@ -128,30 +103,30 @@ private:
     bool initFile(std::string path);
     bool initGdiGrab(const std::string& params);

+    bool isCapturing() const noexcept;
+    void startLoop();
+
+#if VIDEO_CLIENT_INPUT
+    void switchDevice();
+    bool capturing_ {false};
+#else
+    void createDecoder();
+    void deleteDecoder();
+
+    std::unique_ptr<MediaDecoder> decoder_;
+    std::shared_ptr<SinkClient> sink_;
+    ThreadLoop loop_;
+
     // for ThreadLoop
     bool setup();
     void process();
     void cleanup();

     bool captureFrame();
-    bool isCapturing() const noexcept;

-#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
-    int allocateOneBuffer(struct VideoFrameBuffer& b, int length);
-    void freeOneBuffer(struct VideoFrameBuffer& b);
-    bool waitForBufferFull();
-
-    std::mutex mutex_;
-    std::condition_variable frame_cv_;
-    int capture_index_ = 0;
-    int publish_index_ = 0;
-
-    /* Get notified when libav is done with this buffer */
-    void releaseBufferCb(uint8_t* ptr);
-    std::array<struct VideoFrameBuffer, 8> buffers_;
+    int rotation_ {0};
+    std::shared_ptr<AVBufferRef> displayMatrix_;
+    void setRotation(int angle);
 #endif
 };

 }} // namespace jami::video
-
-#endif // __VIDEO_INPUT_H__
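A small note on the VIDEO_CLIENT_INPUT macro introduced in this header: it is always defined, to 1 on Android/UWP/iOS and 0 everywhere else, so conditional code is meant to test its value rather than its presence. Illustrative only:

#include "video_input.h" // defines VIDEO_CLIENT_INPUT to 0 or 1 per platform

#if VIDEO_CLIENT_INPUT
// mobile/UWP: frames are pushed by the platform client via getNewFrame()/publishFrame()
#else
// desktop: frames are pulled by the ThreadLoop through MediaDecoder
#endif

// Note: "#ifdef VIDEO_CLIENT_INPUT" would be true on every platform,
// because the macro is always defined; test its value with "#if" instead.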