author     mflodman@webrtc.org <mflodman@webrtc.org@4adac7df-926f-26a2-2b94-8c16560cd09d>  2013-07-23 11:35:00 +0000
committer  mflodman@webrtc.org <mflodman@webrtc.org@4adac7df-926f-26a2-2b94-8c16560cd09d>  2013-07-23 11:35:00 +0000
commit     bf76ae2bdfc0b4e8e875f6cce1c85d4d66357beb (patch)
tree       d8e354ad48de8ace561eafc3e6316d6098e60e59
parent     3df426b94aa0a96fdd37afb0d4642c0fbf6a2358 (diff)
Hooking up first simple CPU adaptation version.
BUG=
R=pbos@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1767004

git-svn-id: http://webrtc.googlecode.com/svn/trunk/webrtc@4384 4adac7df-926f-26a2-2b94-8c16560cd09d
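For context, a minimal usage sketch of the new-API surface this change wires up, mirroring the loopback.cc update further down (the DirectTransport setup is assumed from the existing test, not introduced here):

  // Enable the still-experimental CPU overuse detection when creating a call.
  newapi::VideoCall::Config call_config;
  call_config.send_transport = &transport;  // Must be non-NULL; asserted in VideoCall.
  call_config.overuse_detection = true;     // Lets VideoSendStream attach a ResolutionAdaptor.
  scoped_ptr<newapi::VideoCall> call(video_engine->CreateCall(call_config));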
-rw-r--r--  video_engine/include/vie_base.h                   20
-rw-r--r--  video_engine/internal/video_call.cc               11
-rw-r--r--  video_engine/internal/video_call.h                 4
-rw-r--r--  video_engine/internal/video_engine.cc              9
-rw-r--r--  video_engine/internal/video_send_stream.cc        59
-rw-r--r--  video_engine/internal/video_send_stream.h          5
-rw-r--r--  video_engine/new_include/video_engine.h           11
-rw-r--r--  video_engine/overuse_frame_detector.cc            45
-rw-r--r--  video_engine/overuse_frame_detector.h             18
-rw-r--r--  video_engine/overuse_frame_detector_unittest.cc   16
-rw-r--r--  video_engine/test/full_stack.cc                    5
-rw-r--r--  video_engine/test/loopback.cc                      5
-rw-r--r--  video_engine/vie_base_impl.cc                     31
-rw-r--r--  video_engine/vie_base_impl.h                       4
-rw-r--r--  video_engine/vie_capture_impl.cc                  12
-rw-r--r--  video_engine/vie_capturer.cc                      14
-rw-r--r--  video_engine/vie_capturer.h                        6
-rw-r--r--  video_engine/vie_encoder.cc                        4
-rw-r--r--  video_engine/vie_encoder.h                         4
-rw-r--r--  video_engine/vie_shared_data.h                     8
20 files changed, 239 insertions, 52 deletions
diff --git a/video_engine/include/vie_base.h b/video_engine/include/vie_base.h
index c1f7e51f..f4e9d729 100644
--- a/video_engine/include/vie_base.h
+++ b/video_engine/include/vie_base.h
@@ -26,6 +26,20 @@ namespace webrtc {
class Config;
class VoiceEngine;
+// CpuOveruseObserver is called when a system overuse is detected and
+// VideoEngine cannot keep up the encoding frequency.
+class CpuOveruseObserver {
+ public:
+ // Called as soon as an overuse is detected.
+ virtual void OveruseDetected() = 0;
+ // Called periodically when the system is not overused any longer.
+ virtual void NormalUsage() = 0;
+
+ protected:
+ virtual ~CpuOveruseObserver() {}
+};
+
+
class WEBRTC_DLLEXPORT VideoEngine {
public:
// Creates a VideoEngine object, which can then be used to acquire sub-APIs.
@@ -96,6 +110,12 @@ class WEBRTC_DLLEXPORT ViEBase {
// Deletes an existing channel and releases the utilized resources.
virtual int DeleteChannel(const int video_channel) = 0;
+ // Registers an observer to be called when an overuse is detected, see
+ // 'CpuOveruseObserver' for details.
+ // NOTE: This is still very experimental functionality.
+ virtual int RegisterCpuOveruseObserver(int channel,
+ CpuOveruseObserver* observer) = 0;
+
// Specifies the VoiceEngine and VideoEngine channel pair to use for
// audio/video synchronization.
virtual int ConnectAudioChannel(const int video_channel,
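A sketch of how an application could plug into this observer interface through the ViEBase API added below in vie_base_impl.cc; the AppOveruseObserver class and the surrounding setup are illustrative only, not part of the patch:

class AppOveruseObserver : public webrtc::CpuOveruseObserver {
 public:
  virtual void OveruseDetected() { /* e.g. request a lower capture resolution. */ }
  virtual void NormalUsage() { /* e.g. restore the preferred resolution. */ }
};

// base is a ViEBase* from ViEBase::GetInterface(video_engine);
// video_channel is an already created channel.
AppOveruseObserver observer;
base->RegisterCpuOveruseObserver(video_channel, &observer);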
diff --git a/video_engine/internal/video_call.cc b/video_engine/internal/video_call.cc
index adaa14bf..db77c760 100644
--- a/video_engine/internal/video_call.cc
+++ b/video_engine/internal/video_call.cc
@@ -28,13 +28,13 @@ namespace webrtc {
namespace internal {
VideoCall::VideoCall(webrtc::VideoEngine* video_engine,
- newapi::Transport* send_transport)
- : send_transport(send_transport),
+ const newapi::VideoCall::Config& config)
+ : config_(config),
receive_lock_(RWLockWrapper::CreateRWLock()),
send_lock_(RWLockWrapper::CreateRWLock()),
video_engine_(video_engine) {
assert(video_engine != NULL);
- assert(send_transport != NULL);
+ assert(config.send_transport != NULL);
rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine_);
assert(rtp_rtcp_ != NULL);
@@ -75,7 +75,8 @@ newapi::VideoSendStream* VideoCall::CreateSendStream(
config.codec.numberOfSimulcastStreams == config.rtp.ssrcs.size());
VideoSendStream* send_stream =
- new VideoSendStream(send_transport, video_engine_, config);
+ new VideoSendStream(config_.send_transport, config_.overuse_detection,
+ video_engine_, config);
WriteLockScoped write_lock(*send_lock_);
for (size_t i = 0; i < config.rtp.ssrcs.size(); ++i) {
@@ -104,7 +105,7 @@ VideoReceiveStream::Config VideoCall::GetDefaultReceiveConfig() {
newapi::VideoReceiveStream* VideoCall::CreateReceiveStream(
const newapi::VideoReceiveStream::Config& config) {
VideoReceiveStream* receive_stream = new VideoReceiveStream(
- video_engine_, config, send_transport);
+ video_engine_, config, config_.send_transport);
WriteLockScoped write_lock(*receive_lock_);
assert(receive_ssrcs_.find(config.rtp.ssrc) == receive_ssrcs_.end());
diff --git a/video_engine/internal/video_call.h b/video_engine/internal/video_call.h
index 817d70fc..ba2f040d 100644
--- a/video_engine/internal/video_call.h
+++ b/video_engine/internal/video_call.h
@@ -34,7 +34,7 @@ namespace internal {
class VideoCall : public newapi::VideoCall, public newapi::PacketReceiver {
public:
VideoCall(webrtc::VideoEngine* video_engine,
- newapi::Transport* send_transport);
+ const newapi::VideoCall::Config& config);
virtual ~VideoCall();
virtual newapi::PacketReceiver* Receiver() OVERRIDE;
@@ -67,7 +67,7 @@ class VideoCall : public newapi::VideoCall, public newapi::PacketReceiver {
bool DeliverRtcp(ModuleRTPUtility::RTPHeaderParser* rtp_parser,
const void* packet, size_t length);
- newapi::Transport* send_transport;
+ newapi::VideoCall::Config config_;
std::map<uint32_t, newapi::VideoReceiveStream*> receive_ssrcs_;
scoped_ptr<RWLockWrapper> receive_lock_;
diff --git a/video_engine/internal/video_engine.cc b/video_engine/internal/video_engine.cc
index 4ba8c43e..10dd3fa5 100644
--- a/video_engine/internal/video_engine.cc
+++ b/video_engine/internal/video_engine.cc
@@ -21,8 +21,8 @@ namespace internal {
class VideoEngine : public newapi::VideoEngine {
public:
- explicit VideoEngine(const newapi::VideoEngineConfig& engine_config)
- : config_(engine_config) {
+ explicit VideoEngine(const newapi::VideoEngineConfig& config)
+ : config_(config) {
video_engine_ = webrtc::VideoEngine::Create();
assert(video_engine_ != NULL);
@@ -36,8 +36,9 @@ class VideoEngine : public newapi::VideoEngine {
virtual ~VideoEngine() { webrtc::VideoEngine::Delete(video_engine_); }
- virtual newapi::VideoCall* CreateCall(newapi::Transport* transport) OVERRIDE {
- return new VideoCall(video_engine_, transport);
+ virtual newapi::VideoCall* CreateCall(
+ const newapi::VideoCall::Config& config) OVERRIDE {
+ return new VideoCall(video_engine_, config);
}
private:
diff --git a/video_engine/internal/video_send_stream.cc b/video_engine/internal/video_send_stream.cc
index f50ca118..b2401daf 100644
--- a/video_engine/internal/video_send_stream.cc
+++ b/video_engine/internal/video_send_stream.cc
@@ -23,7 +23,58 @@
namespace webrtc {
namespace internal {
+// Super simple and temporary overuse logic. This will move to the application
+// as soon as the new API allows changing send codec on the fly.
+class ResolutionAdaptor : public webrtc::CpuOveruseObserver {
+ public:
+ ResolutionAdaptor(ViECodec* codec, int channel, size_t width, size_t height)
+ : codec_(codec),
+ channel_(channel),
+ max_width_(width),
+ max_height_(height) {}
+
+ virtual ~ResolutionAdaptor() {}
+
+ virtual void OveruseDetected() OVERRIDE {
+ VideoCodec codec;
+ if (codec_->GetSendCodec(channel_, codec) != 0)
+ return;
+
+ if (codec.width / 2 < min_width || codec.height / 2 < min_height)
+ return;
+
+ codec.width /= 2;
+ codec.height /= 2;
+ codec_->SetSendCodec(channel_, codec);
+ }
+
+ virtual void NormalUsage() OVERRIDE {
+ VideoCodec codec;
+ if (codec_->GetSendCodec(channel_, codec) != 0)
+ return;
+
+ if (codec.width * 2u > max_width_ || codec.height * 2u > max_height_)
+ return;
+
+ codec.width *= 2;
+ codec.height *= 2;
+ codec_->SetSendCodec(channel_, codec);
+ }
+
+ private:
+ // Temporary and arbitrarily chosen minimum resolution.
+ static const size_t min_width = 160;
+ static const size_t min_height = 120;
+
+ ViECodec* codec_;
+ const int channel_;
+
+ const size_t max_width_;
+ const size_t max_height_;
+};
+
VideoSendStream::VideoSendStream(newapi::Transport* transport,
+ bool overuse_detection,
webrtc::VideoEngine* video_engine,
const newapi::VideoSendStream::Config& config)
: transport_(transport), config_(config) {
@@ -57,6 +108,14 @@ VideoSendStream::VideoSendStream(newapi::Transport* transport,
if (codec_->SetSendCodec(channel_, config_.codec) != 0) {
abort();
}
+
+ if (overuse_detection) {
+ overuse_observer_.reset(
+ new ResolutionAdaptor(codec_, channel_, config_.codec.width,
+ config_.codec.height));
+ video_engine_base_->RegisterCpuOveruseObserver(channel_,
+ overuse_observer_.get());
+ }
}
VideoSendStream::~VideoSendStream() {
diff --git a/video_engine/internal/video_send_stream.h b/video_engine/internal/video_send_stream.h
index f5f05169..571f4282 100644
--- a/video_engine/internal/video_send_stream.h
+++ b/video_engine/internal/video_send_stream.h
@@ -28,11 +28,15 @@ class ViENetwork;
class ViERTP_RTCP;
namespace internal {
+
+class ResolutionAdaptor;
+
class VideoSendStream : public newapi::VideoSendStream,
public newapi::VideoSendStreamInput,
public webrtc::Transport {
public:
VideoSendStream(newapi::Transport* transport,
+ bool overuse_detection,
webrtc::VideoEngine* video_engine,
const newapi::VideoSendStream::Config& config);
@@ -72,6 +76,7 @@ class VideoSendStream : public newapi::VideoSendStream,
int channel_;
int capture_id_;
+ scoped_ptr<ResolutionAdaptor> overuse_observer_;
};
} // namespace internal
} // namespace webrtc
diff --git a/video_engine/new_include/video_engine.h b/video_engine/new_include/video_engine.h
index 3d042a1f..440f6c5c 100644
--- a/video_engine/new_include/video_engine.h
+++ b/video_engine/new_include/video_engine.h
@@ -49,6 +49,13 @@ struct VideoEngineConfig {
// estimates etc.
class VideoCall {
public:
+ struct Config {
+ Config() : send_transport(NULL), overuse_detection(false) {}
+
+ Transport* send_transport;
+ bool overuse_detection;
+ };
+
virtual std::vector<VideoCodec> GetVideoCodecs() = 0;
virtual VideoSendStream::Config GetDefaultSendConfig() = 0;
@@ -87,10 +94,10 @@ class VideoCall {
// calls.
class VideoEngine {
public:
- static VideoEngine* Create(const VideoEngineConfig& engine_config);
+ static VideoEngine* Create(const VideoEngineConfig& config);
virtual ~VideoEngine() {}
- virtual VideoCall* CreateCall(Transport* send_transport) = 0;
+ virtual VideoCall* CreateCall(const VideoCall::Config& config) = 0;
};
} // namespace newapi
diff --git a/video_engine/overuse_frame_detector.cc b/video_engine/overuse_frame_detector.cc
index a373c9a6..61d1506a 100644
--- a/video_engine/overuse_frame_detector.cc
+++ b/video_engine/overuse_frame_detector.cc
@@ -10,10 +10,9 @@
#include "webrtc/video_engine/overuse_frame_detector.h"
-#include <cassert>
-
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/video_engine/include/vie_base.h"
namespace webrtc {
@@ -21,19 +20,24 @@ namespace webrtc {
const int64_t kProcessIntervalMs = 2000;
const int kOveruseHistoryMs = 5000;
const float kMinEncodeRatio = 29 / 30.0f;
+const int kMinCallbackDeltaMs = 30000;
-OveruseFrameDetector::OveruseFrameDetector(Clock* clock,
- OveruseObserver* observer)
+OveruseFrameDetector::OveruseFrameDetector(Clock* clock)
: crit_(CriticalSectionWrapper::CreateCriticalSection()),
- observer_(observer),
+ observer_(NULL),
clock_(clock),
- last_process_time_(clock->TimeInMilliseconds()) {
- assert(observer);
+ last_process_time_(clock->TimeInMilliseconds()),
+ last_callback_time_(clock->TimeInMilliseconds()) {
}
OveruseFrameDetector::~OveruseFrameDetector() {
}
+void OveruseFrameDetector::SetObserver(CpuOveruseObserver* observer) {
+ CriticalSectionScoped cs(crit_.get());
+ observer_ = observer;
+}
+
void OveruseFrameDetector::CapturedFrame() {
CriticalSectionScoped cs(crit_.get());
CleanOldSamples();
@@ -51,30 +55,43 @@ int32_t OveruseFrameDetector::TimeUntilNextProcess() {
int32_t OveruseFrameDetector::Process() {
CriticalSectionScoped cs(crit_.get());
- if (clock_->TimeInMilliseconds() < last_process_time_ + kProcessIntervalMs)
+ int64_t now = clock_->TimeInMilliseconds();
+ if (now < last_process_time_ + kProcessIntervalMs)
return 0;
- last_process_time_ = clock_->TimeInMilliseconds();
- CleanOldSamples();
-
- if (encode_times_.size() == 0 || capture_times_.size() == 0)
+ last_process_time_ = now;
+ if (!observer_ || encode_times_.size() == 0 || capture_times_.size() == 0)
return 0;
+ CleanOldSamples();
+ if (encode_times_.front() > now - kOveruseHistoryMs / 2) {
+ return 0;
+ }
float encode_ratio = encode_times_.size() /
static_cast<float>(capture_times_.size());
if (encode_ratio < kMinEncodeRatio) {
observer_->OveruseDetected();
+ capture_times_.clear();
+ encode_times_.clear();
+ last_callback_time_ = now;
+ } else if (last_callback_time_ < now - kMinCallbackDeltaMs) {
+ // TODO(mflodman) This should only be triggered if we have a good reason to
+ // believe we can increase the resolution again.
+ observer_->NormalUsage();
+ last_callback_time_ = now;
+ capture_times_.clear();
+ encode_times_.clear();
}
return 0;
}
void OveruseFrameDetector::CleanOldSamples() {
int64_t time_now = clock_->TimeInMilliseconds();
- while (capture_times_.size() > 0 &&
+ while (!capture_times_.empty() &&
capture_times_.front() < time_now - kOveruseHistoryMs) {
capture_times_.pop_front();
}
- while (encode_times_.size() > 0 &&
+ while (!encode_times_.empty() &&
encode_times_.front() < time_now - kOveruseHistoryMs) {
encode_times_.pop_front();
}
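A worked illustration of the detection rule in Process() above (numbers chosen purely for illustration):

// Samples collected over the kOveruseHistoryMs = 5000 ms window:
//   capture_times_.size() = 150   (30 fps capture)
//   encode_times_.size()  = 140   (10 frames never reached the encoder)
//   encode_ratio = 140 / 150.0f = 0.933
// 0.933 < kMinEncodeRatio (29 / 30.0f ~= 0.967), so OveruseDetected() fires,
// both sample lists are cleared and last_callback_time_ is updated. If the
// ratio instead stays at or above the threshold, NormalUsage() is reported at
// most once every kMinCallbackDeltaMs = 30000 ms.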
diff --git a/video_engine/overuse_frame_detector.h b/video_engine/overuse_frame_detector.h
index d631d34a..e382c7da 100644
--- a/video_engine/overuse_frame_detector.h
+++ b/video_engine/overuse_frame_detector.h
@@ -21,22 +21,17 @@ namespace webrtc {
class Clock;
class CriticalSectionWrapper;
+class CpuOveruseObserver;
-class OveruseObserver {
- public:
- // Called when an overuse has been detected, based on the number of calls to
- // 'CapturedFrame' and 'EncodedFrame'.
- virtual void OveruseDetected() = 0;
- virtual ~OveruseObserver() {}
-};
-
-// Use to detect system overuse based on the number of captured frames vs. the
+// Use to detect system overuse based on the number of captured frames vs the
// number of encoded frames.
class OveruseFrameDetector : public Module {
public:
- OveruseFrameDetector(Clock* clock, OveruseObserver* observer);
+ explicit OveruseFrameDetector(Clock* clock);
~OveruseFrameDetector();
+ void SetObserver(CpuOveruseObserver* observer);
+
// Called for each new captured frame.
void CapturedFrame();
@@ -54,10 +49,11 @@ class OveruseFrameDetector : public Module {
scoped_ptr<CriticalSectionWrapper> crit_;
// Observer getting overuse reports.
- OveruseObserver* observer_;
+ CpuOveruseObserver* observer_;
Clock* clock_;
int64_t last_process_time_;
+ int64_t last_callback_time_;
// Capture time for frames.
std::list<int64_t> capture_times_;
diff --git a/video_engine/overuse_frame_detector_unittest.cc b/video_engine/overuse_frame_detector_unittest.cc
index 383f80f4..864f1a2d 100644
--- a/video_engine/overuse_frame_detector_unittest.cc
+++ b/video_engine/overuse_frame_detector_unittest.cc
@@ -13,6 +13,7 @@
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/overuse_frame_detector.h"
using ::testing::_;
@@ -23,24 +24,25 @@ namespace webrtc {
const int kProcessIntervalMs = 2000;
-class MockOveruseObserver : public OveruseObserver {
+class MockCpuOveruseObserver : public CpuOveruseObserver {
public:
- MockOveruseObserver() {}
- virtual ~MockOveruseObserver() {}
+ MockCpuOveruseObserver() {}
+ virtual ~MockCpuOveruseObserver() {}
MOCK_METHOD0(OveruseDetected, void());
+ MOCK_METHOD0(NormalUsage, void());
};
class OveruseFrameDetectorTest : public ::testing::Test {
protected:
virtual void SetUp() {
clock_.reset(new SimulatedClock(1234));
- observer_.reset(new MockOveruseObserver());
- overuse_detector_.reset(new OveruseFrameDetector(clock_.get(),
- observer_.get()));
+ observer_.reset(new MockCpuOveruseObserver());
+ overuse_detector_.reset(new OveruseFrameDetector(clock_.get()));
+ overuse_detector_->SetObserver(observer_.get());
}
scoped_ptr<SimulatedClock> clock_;
- scoped_ptr<MockOveruseObserver> observer_;
+ scoped_ptr<MockCpuOveruseObserver> observer_;
scoped_ptr<OveruseFrameDetector> overuse_detector_;
};
diff --git a/video_engine/test/full_stack.cc b/video_engine/test/full_stack.cc
index b19d20ab..626da309 100644
--- a/video_engine/test/full_stack.cc
+++ b/video_engine/test/full_stack.cc
@@ -284,7 +284,10 @@ TEST_P(FullStackTest, NoPacketLoss) {
params.avg_ssim_threshold,
static_cast<uint64_t>(FLAGS_seconds * params.clip.fps));
- scoped_ptr<newapi::VideoCall> call(video_engine->CreateCall(&analyzer));
+ newapi::VideoCall::Config call_config;
+ call_config.send_transport = &analyzer;
+
+ scoped_ptr<newapi::VideoCall> call(video_engine->CreateCall(call_config));
analyzer.receiver_ = call->Receiver();
transport.SetReceiver(&analyzer);
diff --git a/video_engine/test/loopback.cc b/video_engine/test/loopback.cc
index 5040799e..b298b1af 100644
--- a/video_engine/test/loopback.cc
+++ b/video_engine/test/loopback.cc
@@ -42,7 +42,10 @@ TEST_F(LoopbackTest, Test) {
newapi::VideoEngine::Create(webrtc::newapi::VideoEngineConfig()));
test::DirectTransport transport(NULL);
- scoped_ptr<newapi::VideoCall> call(video_engine->CreateCall(&transport));
+ newapi::VideoCall::Config call_config;
+ call_config.send_transport = &transport;
+ call_config.overuse_detection = true;
+ scoped_ptr<newapi::VideoCall> call(video_engine->CreateCall(call_config));
// Loopback, call sends to itself.
transport.SetReceiver(call->Receiver());
diff --git a/video_engine/vie_base_impl.cc b/video_engine/vie_base_impl.cc
index d7923164..17e9a744 100644
--- a/video_engine/vie_base_impl.cc
+++ b/video_engine/vie_base_impl.cc
@@ -21,6 +21,7 @@
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_errors.h"
+#include "webrtc/video_engine/vie_capturer.h"
#include "webrtc/video_engine/vie_channel.h"
#include "webrtc/video_engine/vie_channel_manager.h"
#include "webrtc/video_engine/vie_defines.h"
@@ -86,6 +87,36 @@ int ViEBaseImpl::SetVoiceEngine(VoiceEngine* voice_engine) {
return 0;
}
+int ViEBaseImpl::RegisterCpuOveruseObserver(int video_channel,
+ CpuOveruseObserver* observer) {
+ ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+ ViEChannel* vie_channel = cs.Channel(video_channel);
+ if (!vie_channel) {
+ WEBRTC_TRACE(kTraceError,
+ kTraceVideo,
+ ViEId(shared_data_.instance_id()),
+ "%s: channel %d doesn't exist",
+ __FUNCTION__,
+ video_channel);
+ shared_data_.SetLastError(kViEBaseInvalidChannelId);
+ return -1;
+ }
+ ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+ assert(vie_encoder);
+
+ ViEInputManagerScoped is(*(shared_data_.input_manager()));
+ ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+ if (provider) {
+ ViECapturer* capturer = is.Capture(provider->Id());
+ assert(capturer);
+ capturer->RegisterCpuOveruseObserver(observer);
+ }
+
+ shared_data_.overuse_observers()->insert(
+ std::pair<int, CpuOveruseObserver*>(video_channel, observer));
+ return 0;
+}
+
int ViEBaseImpl::CreateChannel(int& video_channel) { // NOLINT
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
"%s", __FUNCTION__);
diff --git a/video_engine/vie_base_impl.h b/video_engine/vie_base_impl.h
index 13723a11..fca63f4f 100644
--- a/video_engine/vie_base_impl.h
+++ b/video_engine/vie_base_impl.h
@@ -31,6 +31,8 @@ class ViEBaseImpl
// Implements ViEBase.
virtual int Init();
virtual int SetVoiceEngine(VoiceEngine* voice_engine);
+ virtual int RegisterCpuOveruseObserver(int channel,
+ CpuOveruseObserver* observer);
virtual int CreateChannel(int& video_channel); // NOLINT
virtual int CreateChannel(int& video_channel, // NOLINT
int original_channel);
@@ -48,7 +50,7 @@ class ViEBaseImpl
virtual int LastError();
protected:
- ViEBaseImpl(const Config& config);
+ explicit ViEBaseImpl(const Config& config);
virtual ~ViEBaseImpl();
ViESharedData* shared_data() { return &shared_data_; }
diff --git a/video_engine/vie_capture_impl.cc b/video_engine/vie_capture_impl.cc
index 28b4d19e..85353c30 100644
--- a/video_engine/vie_capture_impl.cc
+++ b/video_engine/vie_capture_impl.cc
@@ -10,6 +10,8 @@
#include "webrtc/video_engine/vie_capture_impl.h"
+#include <map>
+
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/video_engine/include/vie_errors.h"
#include "webrtc/video_engine/vie_capturer.h"
@@ -23,6 +25,8 @@
namespace webrtc {
+class CpuOveruseObserver;
+
ViECapture* ViECapture::GetInterface(VideoEngine* video_engine) {
#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
if (!video_engine) {
@@ -201,6 +205,11 @@ int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
shared_data_->SetLastError(kViECaptureDeviceUnknownError);
return -1;
}
+ std::map<int, CpuOveruseObserver*>::iterator it =
+ shared_data_->overuse_observers()->find(video_channel);
+ if (it != shared_data_->overuse_observers()->end()) {
+ vie_capture->RegisterCpuOveruseObserver(it->second);
+ }
return 0;
}
@@ -241,6 +250,9 @@ int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
return -1;
}
+ ViECapturer* vie_capture = is.Capture(frame_provider->Id());
+ assert(vie_capture);
+ vie_capture->RegisterCpuOveruseObserver(NULL);
if (frame_provider->DeregisterFrameCallback(vie_encoder) != 0) {
shared_data_->SetLastError(kViECaptureDeviceUnknownError);
return -1;
diff --git a/video_engine/vie_capturer.cc b/video_engine/vie_capturer.cc
index abef8aaa..5364a8fa 100644
--- a/video_engine/vie_capturer.cc
+++ b/video_engine/vie_capturer.cc
@@ -21,7 +21,9 @@
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
+#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_image_process.h"
+#include "webrtc/video_engine/overuse_frame_detector.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_encoder.h"
@@ -55,7 +57,8 @@ ViECapturer::ViECapturer(int capture_id,
reported_brightness_level_(Normal),
denoising_enabled_(false),
observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
- observer_(NULL) {
+ observer_(NULL),
+ overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock())) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
"ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
capture_id, engine_id);
@@ -66,12 +69,14 @@ ViECapturer::ViECapturer(int capture_id,
} else {
assert(false);
}
+ module_process_thread_.RegisterModule(overuse_detector_.get());
}
ViECapturer::~ViECapturer() {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
"ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
capture_id_, engine_id_);
+ module_process_thread_.DeRegisterModule(overuse_detector_.get());
// Stop the thread.
deliver_cs_->Enter();
@@ -255,6 +260,10 @@ const char* ViECapturer::CurrentDeviceName() const {
return capture_module_->CurrentDeviceName();
}
+void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
+ overuse_detector_->SetObserver(observer);
+}
+
int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
return capture_module_->SetCaptureDelay(delay_ms);
}
@@ -340,6 +349,7 @@ void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
captured_frame_.SwapFrame(&video_frame);
capture_event_.Set();
+ overuse_detector_->CapturedFrame();
return;
}
@@ -503,6 +513,8 @@ bool ViECapturer::ViECaptureProcess() {
if (!captured_frame_.IsZeroSize()) {
// New I420 frame.
capture_cs_->Enter();
+ // The frame sent for encoding, update the overuse detector.
+ overuse_detector_->EncodedFrame();
deliver_frame_.SwapFrame(&captured_frame_);
captured_frame_.ResetSize();
capture_cs_->Leave();
diff --git a/video_engine/vie_capturer.h b/video_engine/vie_capturer.h
index d81adfd9..ff656ffd 100644
--- a/video_engine/vie_capturer.h
+++ b/video_engine/vie_capturer.h
@@ -30,6 +30,8 @@ namespace webrtc {
class Config;
class CriticalSectionWrapper;
class EventWrapper;
+class CpuOveruseObserver;
+class OveruseFrameDetector;
class ProcessThread;
class ThreadWrapper;
class ViEEffectFilter;
@@ -100,6 +102,8 @@ class ViECapturer
// Information.
const char* CurrentDeviceName() const;
+ void RegisterCpuOveruseObserver(CpuOveruseObserver* observer);
+
protected:
ViECapturer(int capture_id,
int engine_id,
@@ -173,6 +177,8 @@ class ViECapturer
CaptureCapability requested_capability_;
I420VideoFrame capture_device_image_;
+
+ scoped_ptr<OveruseFrameDetector> overuse_detector_;
};
} // namespace webrtc
diff --git a/video_engine/vie_encoder.cc b/video_engine/vie_encoder.cc
index 711bf7d5..206a718d 100644
--- a/video_engine/vie_encoder.cc
+++ b/video_engine/vie_encoder.cc
@@ -57,6 +57,7 @@ static const int kTransmissionMaxBitrateMultiplier = 2;
class QMVideoSettingsCallback : public VCMQMSettingsCallback {
public:
explicit QMVideoSettingsCallback(VideoProcessingModule* vpm);
+
~QMVideoSettingsCallback();
// Update VPM with QM (quality modes: frame size & frame rate) settings.
@@ -73,6 +74,7 @@ class ViEBitrateObserver : public BitrateObserver {
explicit ViEBitrateObserver(ViEEncoder* owner)
: owner_(owner) {
}
+ virtual ~ViEBitrateObserver() {}
// Implements BitrateObserver.
virtual void OnNetworkChanged(const uint32_t bitrate_bps,
const uint8_t fraction_lost,
@@ -88,6 +90,7 @@ class ViEPacedSenderCallback : public PacedSender::Callback {
explicit ViEPacedSenderCallback(ViEEncoder* owner)
: owner_(owner) {
}
+ virtual ~ViEPacedSenderCallback() {}
virtual bool TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms) {
return owner_->TimeToSendPacket(ssrc, sequence_number, capture_time_ms);
@@ -381,7 +384,6 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
ViEId(engine_id_, channel_id_),
"%s: CodecType: %d, width: %u, height: %u", __FUNCTION__,
video_codec.codecType, video_codec.width, video_codec.height);
-
// Setting target width and height for VPM.
if (vpm_.SetTargetResolution(video_codec.width, video_codec.height,
video_codec.maxFramerate) != VPM_OK) {
diff --git a/video_engine/vie_encoder.h b/video_engine/vie_encoder.h
index 368f91bf..2284cf21 100644
--- a/video_engine/vie_encoder.h
+++ b/video_engine/vie_encoder.h
@@ -26,16 +26,16 @@
namespace webrtc {
-class CriticalSectionWrapper;
class Config;
+class CriticalSectionWrapper;
class PacedSender;
class ProcessThread;
class QMVideoSettingsCallback;
class RtpRtcp;
-class VideoCodingModule;
class ViEBitrateObserver;
class ViEEffectFilter;
class ViEEncoderObserver;
+class VideoCodingModule;
class ViEPacedSenderCallback;
class ViEEncoder
diff --git a/video_engine/vie_shared_data.h b/video_engine/vie_shared_data.h
index 19f2ebfa..0d8d100a 100644
--- a/video_engine/vie_shared_data.h
+++ b/video_engine/vie_shared_data.h
@@ -14,11 +14,14 @@
#ifndef WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
#define WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
+#include <map>
+
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
class Config;
+class CpuOveruseObserver;
class ProcessThread;
class ViEChannelManager;
class ViEInputManager;
@@ -39,6 +42,9 @@ class ViESharedData {
ViEInputManager* input_manager() { return input_manager_.get(); }
ViERenderManager* render_manager() { return render_manager_.get(); }
+ std::map<int, CpuOveruseObserver*>* overuse_observers() {
+ return &overuse_observers_; }
+
private:
const int number_cores_;
@@ -47,6 +53,8 @@ class ViESharedData {
scoped_ptr<ViERenderManager> render_manager_;
ProcessThread* module_process_thread_;
mutable int last_error_;
+
+ std::map<int, CpuOveruseObserver*> overuse_observers_;
};
} // namespace webrtc