Commit dda1f61d authored by Adrien Béraud, committed by Denys Vidal

video: add rotation

Change-Id: I0b760e4ae32ab372cef49ef9c2120e722e041ef1
parent 2a9e7bb9
......@@ -117,6 +117,15 @@
</arg>
</method>
<method name="setDeviceOrientation" tp:name-for-bindings="setDeviceOrientation">
<arg type="s" name="name" direction="in">
<tp:docstring>Device name</tp:docstring>
</arg>
<arg type="i" name="angle" direction="in">
<tp:docstring>Angle of device in degrees (counterclockwise)</tp:docstring>
</arg>
</method>
<method name="getRenderer" tp:name-for-bindings="getRenderer">
<tp:docstring>Returns a map of information about a call's renderer.</tp:docstring>
<annotation name="org.qtproject.QtDBus.QtTypeName.Out0" value="MapStringString"/>
......
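For reference, a client could invoke the new D-Bus method roughly as in the hedged sketch below. The service name, object path and interface name are assumptions based on the daemon's usual naming and are not taken from this diff.

#include <QDBusInterface>
#include <QString>

// Hypothetical client helper; the "cx.ring.Ring" names are assumed, not shown in this diff.
void notifyDeviceOrientation(const QString& deviceName, int angleCcwDegrees)
{
    QDBusInterface videoManager("cx.ring.Ring",
                                "/cx/ring/Ring/VideoManager",
                                "cx.ring.Ring.VideoManager");
    // "angle" is in degrees, counterclockwise, per the docstring above.
    videoManager.call("setDeviceOrientation", deviceName, angleCcwDegrees);
}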
......@@ -109,6 +109,12 @@ DBusVideoManager::setDecodingAccelerated(const bool& state)
DRing::setDecodingAccelerated(state);
}
void
DBusVideoManager::setDeviceOrientation(const std::string& name, const int& angle)
{
DRing::setDeviceOrientation(name, angle);
}
std::map<std::string, std::string>
DBusVideoManager::getRenderer(const std::string& callId)
{
......
......@@ -65,6 +65,7 @@ class DRING_PUBLIC DBusVideoManager :
bool hasCameraStarted();
bool getDecodingAccelerated();
void setDecodingAccelerated(const bool& state);
void setDeviceOrientation(const std::string& name, const int& angle);
std::map<std::string, std::string> getRenderer(const std::string& callId);
std::string startLocalRecorder(const bool& audioOnly, const std::string& filepath);
void stopLocalRecorder(const std::string& filepath);
......
......@@ -35,6 +35,7 @@
extern "C" {
#include <libavutil/pixdesc.h>
#include <libavutil/imgutils.h>
#include <libavutil/display.h>
#include <libavcodec/avcodec.h>
}
......@@ -50,6 +51,7 @@ public:
virtual void decodingStopped(const std::string& id, const std::string& shm_path, bool is_mixer) {}
virtual std::string startLocalRecorder(const bool& audioOnly, const std::string& filepath) { return {}; }
virtual void stopLocalRecorder(const std::string& filepath) {}
virtual void setDeviceOrientation(const std::string&, int angle) {}
};
%}
......@@ -61,8 +63,25 @@ std::map<ANativeWindow*, std::unique_ptr<DRing::FrameBuffer>> windows {};
std::mutex windows_mutex;
std::vector<uint8_t> workspace;
int rotAngle = 0;
AVBufferRef* rotMatrix = nullptr;
extern JavaVM *gJavaVM;
void setRotation(int angle)
{
if (angle == rotAngle)
return;
AVBufferRef* localFrameDataBuffer = angle == 0 ? nullptr : av_buffer_alloc(sizeof(int32_t) * 9);
if (localFrameDataBuffer)
av_display_rotation_set(reinterpret_cast<int32_t*>(localFrameDataBuffer->data), angle);
std::swap(rotMatrix, localFrameDataBuffer);
rotAngle = angle;
av_buffer_unref(&localFrameDataBuffer);
}
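The helper above relies on libavutil's display-matrix API: av_display_rotation_set() fills a 3x3 int32_t matrix describing a counterclockwise rotation, and consumers recover the angle with av_display_rotation_get(). A minimal, self-contained sketch of that round trip:

extern "C" {
#include <libavutil/buffer.h>
#include <libavutil/display.h>
}

// Store an angle in a 9-element display matrix and read it back.
static double displayMatrixRoundTrip(int angleCcwDegrees)
{
    AVBufferRef* buf = av_buffer_alloc(sizeof(int32_t) * 9); // 3x3 matrix
    if (!buf)
        return 0.0;
    av_display_rotation_set(reinterpret_cast<int32_t*>(buf->data), angleCcwDegrees);
    double recovered = av_display_rotation_get(reinterpret_cast<int32_t*>(buf->data));
    av_buffer_unref(&buf);
    return recovered; // NaN would indicate an invalid matrix
}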
void rotateNV21(uint8_t* yinput, uint8_t* uvinput, unsigned ystride, unsigned uvstride, unsigned width, unsigned height, int rotation, uint8_t* youtput, uint8_t* uvoutput)
{
if (rotation == 0) {
......@@ -203,22 +222,10 @@ JNIEXPORT void JNICALL Java_cx_ring_daemon_RingserviceJNI_captureVideoFrame(JNIE
// False YUV422, actually NV12 or NV21
auto uvdata = std::min(udata, vdata);
avframe->format = uvdata == udata ? AV_PIX_FMT_NV12 : AV_PIX_FMT_NV21;
if (rotation == 0) {
avframe->data[0] = ydata;
avframe->linesize[0] = ystride;
avframe->data[1] = uvdata;
avframe->linesize[1] = uvstride;
} else {
directPointer = false;
bool swap = rotation != 0 && rotation != 180;
auto ow = avframe->width;
auto oh = avframe->height;
avframe->width = swap ? oh : ow;
avframe->height = swap ? ow : oh;
av_frame_get_buffer(avframe, 1);
rotateNV21(ydata, uvdata, ystride, uvstride, ow, oh, rotation, avframe->data[0], avframe->data[1]);
jenv->CallVoidMethod(image, jenv->GetMethodID(imageClass, "close", "()V"));
}
avframe->data[0] = ydata;
avframe->linesize[0] = ystride;
avframe->data[1] = uvdata;
avframe->linesize[1] = uvstride;
}
} else {
for (int i=0; i<planeCount; i++) {
......@@ -232,6 +239,10 @@ JNIEXPORT void JNICALL Java_cx_ring_daemon_RingserviceJNI_captureVideoFrame(JNIE
}
}
setRotation(rotation);
if (rotMatrix)
av_frame_new_side_data_from_buf(avframe, AV_FRAME_DATA_DISPLAYMATRIX, av_buffer_ref(rotMatrix));
if (directPointer) {
image = jenv->NewGlobalRef(image);
imageClass = (jclass)jenv->NewGlobalRef(imageClass);
......@@ -390,6 +401,7 @@ void applySettings(const std::string& name, const std::map<std::string, std::str
void addVideoDevice(const std::string &node);
void removeVideoDevice(const std::string &node);
void setDeviceOrientation(const std::string& name, int angle);
uint8_t* obtainFrame(int length);
void releaseFrame(uint8_t* frame);
void registerSinkTarget(const std::string& sinkId, const DRing::SinkTarget& target);
......
......@@ -38,6 +38,7 @@ public:
virtual void decodingStopped(const std::string& id, const std::string& shm_path, bool is_mixer) {}
virtual std::string startLocalRecorder(const bool& audioOnly, const std::string& filepath) { return {}; }
virtual void stopLocalRecorder(const std::string& filepath) {}
virtual void setDeviceOrientation(const std::string& name, int angle) {}
};
%}
......@@ -59,6 +60,7 @@ std::map<std::string, std::string> getSettings(const std::string& name);
void applySettings(const std::string& name, const std::map<std::string, std::string>& settings);
void registerSinkTarget(const std::string& sinkId, const DRing::SinkTarget& target);
void setDeviceOrientation(const std::string& name, int angle);
}
class VideoCallback {
......
......@@ -2,7 +2,7 @@ dnl Jami - configure.ac for automake 1.9 and autoconf 2.59
dnl Process this file with autoconf to produce a configure script.
AC_PREREQ([2.65])
AC_INIT([Ring Daemon],[7.4.0],[ring@gnu.org],[ring])
AC_INIT([Ring Daemon],[7.5.0],[ring@gnu.org],[ring])
AC_COPYRIGHT([[Copyright (c) Savoir-faire Linux 2004-2018]])
AC_REVISION([$Revision$])
......
......@@ -347,6 +347,12 @@ setDefaultDevice(const std::string& name)
ring::Manager::instance().saveConfig();
}
void
setDeviceOrientation(const std::string& name, int angle)
{
ring::Manager::instance().getVideoManager().setDeviceOrientation(name, angle);
}
std::map<std::string, std::string>
getDeviceParams(const std::string& name)
{
......@@ -619,4 +625,10 @@ getAudioInput(const std::string& id)
return input;
}
void
VideoManager::setDeviceOrientation(const std::string& name, int angle)
{
videoDeviceMonitor.setDeviceOrientation(name, angle);
}
} // namespace ring
......@@ -38,28 +38,31 @@ namespace ring {
struct VideoManager
{
public:
/**
* VideoManager acts as a cache of the active VideoInput.
* When this input is needed, you must use getVideoCamera
* to create the instance if not done yet and obtain a shared pointer
* for your own usage.
* VideoManager instance doesn't increment the reference count of
* this video input instance: this instance is destroyed when the last
* external user has released its shared pointer.
*/
std::weak_ptr<video::VideoInput> videoInput;
std::shared_ptr<video::VideoFrameActiveWriter> videoPreview;
video::VideoDeviceMonitor videoDeviceMonitor;
std::atomic_bool started;
/**
* VideoManager also acts as a cache of the active AudioInput(s).
* When one of these is needed, you must use getAudioInput, which will
* create an instance if need be and return a shared_ptr.
*/
std::map<std::string, std::weak_ptr<AudioInput>> audioInputs;
std::mutex audioMutex;
std::shared_ptr<AudioInput> audioPreview;
public:
void setDeviceOrientation(const std::string& name, int angle);
/**
* VideoManager acts as a cache of the active VideoInput.
* When this input is needed, you must use getVideoCamera
* to create the instance if not done yet and obtain a shared pointer
* for your own usage.
* VideoManager instance doesn't increment the reference count of
* this video input instance: this instance is destroyed when the last
* external user has released its shared pointer.
*/
std::weak_ptr<video::VideoInput> videoInput;
std::shared_ptr<video::VideoFrameActiveWriter> videoPreview;
video::VideoDeviceMonitor videoDeviceMonitor;
std::atomic_bool started;
/**
* VideoManager also acts as a cache of the active AudioInput(s).
* When one of these is needed, you must use getAudioInput, which will
* create an instance if need be and return a shared_ptr.
*/
std::map<std::string, std::weak_ptr<AudioInput>> audioInputs;
std::mutex audioMutex;
std::shared_ptr<AudioInput> audioPreview;
};
std::shared_ptr<video::VideoFrameActiveWriter> getVideoCamera();
......
......@@ -163,6 +163,7 @@ DRING_PUBLIC VideoCapabilities getCapabilities(const std::string& name);
DRING_PUBLIC std::map<std::string, std::string> getSettings(const std::string& name);
DRING_PUBLIC void applySettings(const std::string& name, const std::map<std::string, std::string>& settings);
DRING_PUBLIC void setDefaultDevice(const std::string& name);
DRING_PUBLIC void setDeviceOrientation(const std::string& name, int angle);
DRING_PUBLIC std::map<std::string, std::string> getDeviceParams(const std::string& name);
......
......@@ -48,6 +48,7 @@ struct DeviceParams {
std::string sdp_flags {};
unsigned offset_x {};
unsigned offset_y {};
int orientation {};
};
}
......
......@@ -189,28 +189,36 @@ MediaFilter::feedInput(AVFrame* frame, const std::string& inputName)
return fail(ss.str(), AVERROR(EINVAL));
}
AVFrame*
std::unique_ptr<MediaFrame>
MediaFilter::readOutput()
{
if (!initialized_) {
fail("Not properly initialized", -1);
return nullptr;
return {};
}
int ret = 0;
AVFrame* frame = av_frame_alloc();
ret = av_buffersink_get_frame_flags(output_, frame, 0);
if (ret >= 0) {
std::unique_ptr<MediaFrame> frame;
switch (av_buffersink_get_type(output_)) {
case AVMEDIA_TYPE_VIDEO:
frame = std::make_unique<VideoFrame>();
break;
case AVMEDIA_TYPE_AUDIO:
frame = std::make_unique<AudioFrame>();
break;
default:
return {};
}
auto err = av_buffersink_get_frame(output_, frame->pointer());
if (err >= 0) {
return frame;
} else if (ret == AVERROR(EAGAIN)) {
} else if (err == AVERROR(EAGAIN)) {
// no data available right now, try again
} else if (ret == AVERROR_EOF) {
} else if (err == AVERROR_EOF) {
RING_WARN() << "Filters have reached EOF, no more frames will be output";
} else {
fail("Error occurred while pulling from filter graph", ret);
fail("Error occurred while pulling from filter graph", err);
}
av_frame_free(&frame);
return nullptr;
return {};
}
void
......
......@@ -97,7 +97,7 @@ class MediaFilter {
*
* NOTE Frame reference belongs to the caller
*/
AVFrame* readOutput();
std::unique_ptr<MediaFrame> readOutput();
/**
* Flush filter to indicate EOF.
......
......@@ -461,11 +461,10 @@ MediaRecorder::filterAndEncode(MediaFilter* filter, int streamIdx)
while (auto frame = filter->readOutput()) {
try {
std::lock_guard<std::mutex> lk(mutex_);
encoder_->encode(frame, streamIdx);
encoder_->encode(frame->pointer(), streamIdx);
} catch (const MediaEncoderException& e) {
RING_ERR() << "Failed to record frame: " << e.what();
}
av_frame_free(&frame);
}
}
}
......
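The loop above reflects the new readOutput() contract: the caller receives an owning std::unique_ptr<MediaFrame> and no longer frees the frame manually. A hedged usage sketch (drainFilter is a hypothetical helper; the include is assumed to pull in MediaFrame as in the daemon sources):

#include "media_filter.h" // assumed to provide ring::MediaFilter and MediaFrame

// Drain every pending frame from a configured filter graph.
void drainFilter(ring::MediaFilter& filter)
{
    while (auto frame = filter.readOutput()) {   // std::unique_ptr<MediaFrame>
        AVFrame* raw = frame->pointer();         // borrowed pointer, owned by the wrapper
        (void)raw;                               // ... hand off to an encoder or sink ...
    }                                            // wrapper destructor releases the AVFrame
}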
......@@ -226,6 +226,8 @@ HardwareAccel::transferToMainMemory(const VideoFrame& frame, AVPixelFormat desir
}
output->pts = input->pts;
if (AVFrameSideData* side_data = av_frame_get_side_data(input, AV_FRAME_DATA_DISPLAYMATRIX))
av_frame_new_side_data_from_buf(output, AV_FRAME_DATA_DISPLAYMATRIX, av_buffer_ref(side_data->buf));
return out;
}
......
......@@ -38,6 +38,7 @@
#include "libav_utils.h"
#include "video_scaler.h"
#include "smartools.h"
#include "media_filter.h"
#ifdef RING_ACCEL
#include "accel.h"
......@@ -54,9 +55,16 @@
#include <cerrno>
#include <cstring>
#include <stdexcept>
#include <cmath>
extern "C" {
#include <libavutil/display.h>
}
namespace ring { namespace video {
const constexpr char FILTER_INPUT_NAME[] = "in";
#if HAVE_SHM
// RAII class helper on sem_wait/sem_post semaphore operations
class SemGuardLock {
......@@ -82,7 +90,7 @@ class SemGuardLock {
class ShmHolder
{
public:
ShmHolder(const std::string& name={});
ShmHolder(const std::string& name = {});
~ShmHolder();
std::string name() const noexcept {
......@@ -316,12 +324,60 @@ SinkClient::SinkClient(const std::string& id, bool mixer)
#endif
{}
void
SinkClient::setRotation(int rotation)
{
if (rotation_ == rotation || width_ == 0 || height_ == 0)
return;
rotation_ = rotation;
RING_WARN("Rotation set to %d", rotation_);
auto in_name = FILTER_INPUT_NAME;
std::stringstream ss;
ss << "[" << in_name << "] " << "format=rgb32,"; // avoid https://trac.ffmpeg.org/ticket/5356
switch (rotation_) {
case 90 :
case -270 :
ss << "transpose=2";
break;
case 180 :
case -180 :
ss << "rotate=PI";
break;
case 270 :
case -90 :
ss << "transpose=1";
break;
default :
ss << "null";
}
const auto format = AV_PIX_FMT_RGB32;
const auto one = rational<int>(1);
std::vector<MediaStream> msv;
msv.emplace_back(in_name, format, one, width_, height_, one, one);
if (!rotation_) {
filter_.reset();
}
else {
filter_.reset(new MediaFilter);
auto ret = filter_->initialize(ss.str(), msv);
if (ret < 0) {
RING_ERR() << "filter init fail";
filter_ = nullptr;
rotation_ = 0;
}
}
}
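As a worked example of the mapping above, a frame tagged with a 90° counterclockwise rotation produces the graph string "[in] format=rgb32,transpose=2", 270° (or -90°) produces "[in] format=rgb32,transpose=1", and 180° produces "[in] format=rgb32,rotate=PI"; transpose=2 is FFmpeg's counterclockwise transpose and transpose=1 the clockwise one, while the format=rgb32 prefix works around the ffmpeg ticket referenced in the comment.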
void
SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
const std::shared_ptr<MediaFrame>& frame_p)
{
auto& f = *std::static_pointer_cast<VideoFrame>(frame_p);
#ifdef DEBUG_FPS
auto currentTime = std::chrono::system_clock::now();
const std::chrono::duration<double> seconds = currentTime - lastFrameDebug_;
......@@ -338,7 +394,7 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
if (avTarget_.push) {
auto outFrame = std::make_unique<VideoFrame>();
outFrame->copyFrom(f);
outFrame->copyFrom(*std::static_pointer_cast<VideoFrame>(frame_p));
avTarget_.push(std::move(outFrame));
}
......@@ -349,18 +405,32 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
if (doTransfer) {
#ifdef RING_ACCEL
auto framePtr = HardwareAccel::transferToMainMemory(f, AV_PIX_FMT_NV12);
const auto& swFrame = *framePtr;
std::shared_ptr<VideoFrame> frame {HardwareAccel::transferToMainMemory(*std::static_pointer_cast<VideoFrame>(frame_p), AV_PIX_FMT_NV12)};
#else
const auto& swFrame = f;
std::shared_ptr<VideoFrame> frame {std::static_pointer_cast<VideoFrame>(frame_p)};
#endif
AVFrameSideData* side_data = av_frame_get_side_data(frame->pointer(), AV_FRAME_DATA_DISPLAYMATRIX);
if (side_data) {
auto matrix_rotation = reinterpret_cast<int32_t*>(side_data->data);
auto angle = av_display_rotation_get(matrix_rotation);
if (!std::isnan(angle))
setRotation(angle);
if (filter_) {
filter_->feedInput(frame->pointer(), FILTER_INPUT_NAME);
frame = std::static_pointer_cast<VideoFrame>(std::shared_ptr<MediaFrame>(filter_->readOutput()));
}
if (frame->height() != height_ || frame->width() != width_) {
setFrameSize(0, 0);
setFrameSize(frame->width(), frame->height());
}
}
#if HAVE_SHM
shm_->renderFrame(swFrame);
shm_->renderFrame(*frame);
#endif
if (target_.pull) {
VideoFrame dst;
const int width = swFrame.width();
const int height = swFrame.height();
const int width = frame->width();
const int height = frame->height();
#if defined(__ANDROID__) || (defined(__APPLE__) && !TARGET_OS_IPHONE)
const int format = AV_PIX_FMT_RGBA;
#else
......@@ -373,7 +443,7 @@ SinkClient::update(Observable<std::shared_ptr<MediaFrame>>* /*obs*/,
buffer_ptr->width = width;
buffer_ptr->height = height;
dst.setFromMemory(buffer_ptr->ptr, format, width, height);
scaler_->scale(swFrame, dst);
scaler_->scale(*frame, dst);
target_.push(std::move(buffer_ptr));
}
}
......
......@@ -35,6 +35,8 @@
#define DEBUG_FPS
namespace ring {class MediaFilter;}
namespace ring { namespace video {
#if HAVE_SHM
......@@ -88,9 +90,13 @@ class SinkClient : public VideoFramePassiveReader
int width_ {0};
int height_ {0};
bool started_ {false}; // used to arbitrate client's stop signal.
int rotation_ {0};
DRing::SinkTarget target_;
DRing::AVSinkTarget avTarget_;
std::unique_ptr<VideoScaler> scaler_;
std::unique_ptr<MediaFilter> filter_;
void setRotation(int rotation);
#ifdef DEBUG_FPS
unsigned frameCount_;
......
......@@ -578,7 +578,9 @@ VideoDevice::VideoDevice(const std::string& path, const std::vector<std::map<std
DeviceParams
VideoDevice::getDeviceParams() const
{
return deviceImpl_->getDeviceParams();
auto params = deviceImpl_->getDeviceParams();
params.orientation = orientation_;
return params;
}
void
......
......@@ -163,6 +163,10 @@ public:
setDeviceParams(params);
}
void setOrientation(int orientation) {
orientation_ = orientation;
}
/**
* Returns the parameters needed for actual use of the device
*/
......@@ -220,6 +224,8 @@ private:
*/
std::string node_ {};
int orientation_ {0};
/*
* Device specific implementation.
* On Linux, V4L2 stuffs go there.
......
......@@ -130,6 +130,15 @@ VideoDeviceMonitor::setDefaultDevice(const std::string& name)
}
}
void
VideoDeviceMonitor::setDeviceOrientation(const std::string& name, int angle)
{
const auto itd = findDeviceByName(name);
if (itd != devices_.cend()) {
itd->setOrientation(angle);
}
}
DeviceParams
VideoDeviceMonitor::getDeviceParams(const std::string& name) const
{
......
......@@ -56,6 +56,7 @@ class VideoDeviceMonitor : public Serializable
std::string getDefaultDevice() const;
std::string getMRLForDefaultDevice() const;
void setDefaultDevice(const std::string& name);
void setDeviceOrientation(const std::string& name, int angle);
void addDevice(const std::string &node, const std::vector<std::map<std::string, std::string>>* devInfo=nullptr);
void removeDevice(const std::string &node);
......
......@@ -44,6 +44,9 @@
#else
#include <unistd.h>
#endif
extern "C" {
#include <libavutil/display.h>
}
namespace ring { namespace video {
......@@ -70,6 +73,9 @@ VideoInput::~VideoInput()
frame_cv_.notify_one();
#endif
loop_.join();
if (auto localFrameDataBuffer = frameDataBuffer_.exchange(nullptr))
av_buffer_unref(&localFrameDataBuffer);
}
#if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
......@@ -111,11 +117,19 @@ void VideoInput::process()
return;
}
if (decOpts_.orientation != rotation_) {
setRotation(decOpts_.orientation);
rotation_ = decOpts_.orientation;
}
for (auto& buffer : buffers_) {
if (buffer.status == BUFFER_FULL && buffer.index == publish_index_) {
auto& frame = getNewFrame();
AVPixelFormat format = getPixelFormat();
if (auto localFDB = frameDataBuffer_.load())
av_frame_new_side_data_from_buf(frame.pointer(), AV_FRAME_DATA_DISPLAYMATRIX, av_buffer_ref(localFDB));
buffer.status = BUFFER_PUBLISHED;
frame.setFromMemory((uint8_t*)buffer.data, format, decOpts_.width, decOpts_.height,
[wthis](uint8_t* ptr) {
......@@ -132,6 +146,18 @@ void VideoInput::process()
}
}
void
VideoInput::setRotation(int angle)
{
auto localFrameDataBuffer = (angle == 0) ? nullptr : av_buffer_alloc(sizeof(int32_t) * 9);
if (localFrameDataBuffer)
av_display_rotation_set(reinterpret_cast<int32_t*>(localFrameDataBuffer->data), angle);
localFrameDataBuffer = frameDataBuffer_.exchange(localFrameDataBuffer);
av_buffer_unref(&localFrameDataBuffer);
}
void VideoInput::cleanup()
{
emitSignal<DRing::VideoSignal::StopCapture>();
......@@ -148,6 +174,8 @@ void VideoInput::cleanup()
RING_ERR("Failed to free buffer [%p]", buffer.data);
}
}
setRotation(0);
}
#else
......
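Taken together, the daemon-side changes form the pipeline sketched below. The function names come from this diff; the device id string is a placeholder and the include is assumed.

// #include <dring/videomanager_interface.h>  // assumed header for the DRING_PUBLIC API

void exampleOrientationUpdate()
{
    // 1. Client-facing entry point (D-Bus, JNI, or direct linking):
    DRing::setDeviceOrientation("camera 0", 90);
    // 2. VideoManager forwards to VideoDeviceMonitor::setDeviceOrientation(),
    //    which caches the angle in the matching VideoDevice (orientation_).
    // 3. VideoDevice::getDeviceParams() reports it as DeviceParams::orientation,
    //    so VideoInput::process() calls setRotation() and attaches an
    //    AV_FRAME_DATA_DISPLAYMATRIX side-data buffer to each published frame.
    // 4. SinkClient::update() reads the matrix back with av_display_rotation_get()
    //    and rotates the frame through a transpose/rotate filter before display.
}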
......@@ -74,6 +74,9 @@ public:
~VideoInput();
// as VideoGenerator
const std::string& getName() const {
return currentResource_;
}
int getWidth() const;
int getHeight() const;
AVPixelFormat getPixelFormat() const;
......@@ -114,6 +117,10 @@ private:
void createDecoder();
void deleteDecoder();
int rotation_ {0};
std::atomic<AVBufferRef*> frameDataBuffer_ {nullptr};
void setRotation(int angle);
// true if decOpts_ is ready to use, false if using promise/future
bool initCamera(const std::string& device);
bool initX11(std::string display);
......
......@@ -29,6 +29,10 @@
#include "logger.h"
#include "smartools.h"
extern "C" {
#include <libavutil/display.h>
}
#include <unistd.h>
#include <map>
......@@ -48,6 +52,7 @@ VideoReceiveThread::VideoReceiveThread(const std::string& id,
, sdpContext_(stream_.str().size(), false, &readFunction, 0, 0, this)
, sink_ {Manager::instance().createSinkClient(id)}
, mtu_(mtu)
, rotation_(0)
, requestKeyFrameCallback_(0)
, loop_(std::bind(&VideoReceiveThread::setup, this),
std::bind(&VideoReceiveThread::process, this),
......@@ -57,6 +62,8 @@ VideoReceiveThread::VideoReceiveThread(const std::string& id,
VideoReceiveThread::~VideoReceiveThread()
{
loop_.join();
auto localFDB = frameDataBuffer.exchange(nullptr);
av_buffer_unref(&localFDB);
}
void
......@@ -183,6 +190,9 @@ bool VideoReceiveThread::decodeFrame()
auto& frame = getNewFrame();
const auto ret = videoDecoder_->decode(frame);
if (auto localFDB = frameDataBuffer.load())
av_frame_new_side_data_from_buf(frame.pointer(), AV_FRAME_DATA_DISPLAYMATRIX, av_buffer_ref(localFDB));
switch (ret) {
case MediaDecoder::Status::FrameFinished:
publishFrame();
......@@ -258,4 +268,17 @@ VideoReceiveThread::triggerKeyFrameRequest()
requestKeyFrameCallback_();
}
void
VideoReceiveThread::setRotation(int angle)
{
auto localFrameDataBuffer = av_buffer_alloc(sizeof(int32_t) * 9); // 3x3 matrix of int32_t
if (localFrameDataBuffer)
av_display_rotation_set(reinterpret_cast<int32_t*>(localFrameDataBuffer->data), angle);
localFrameDataBuffer = frameDataBuffer.exchange(localFrameDataBuffer);
av_buffer_unref(&localFrameDataBuffer);
}
}} // namespace ring::video
......@@ -63,6 +63,13 @@ public:
MediaStream getInfo() const;
void triggerKeyFrameRequest();
/**
* Set angle of rotation to apply to the video by the decoder
*
* @param angle Angle of rotation in degrees (counterclockwise)
*/
void setRotation(int angle);
private:
NON_COPYABLE(VideoReceiveThread);
......@@ -81,6 +88,8 @@ private: