Repo created
This commit is contained in:
parent
81b91f4139
commit
f8c34fa5ee
22732 changed files with 4815320 additions and 2 deletions
|
|
@ -0,0 +1,62 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/balanced_constraint.h"
|
||||
|
||||
#include <string>
|
||||
#include <utility>
|
||||
|
||||
#include "api/sequence_checker.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Constructs the constraint. `degradation_preference_provider` must be
// non-null (DCHECKed below); it is borrowed, not owned — presumably it
// outlives this object (verify against the owner).
// `field_trials` is forwarded to BalancedDegradationSettings.
BalancedConstraint::BalancedConstraint(
    DegradationPreferenceProvider* degradation_preference_provider,
    const FieldTrialsView& field_trials)
    : encoder_target_bitrate_bps_(absl::nullopt),
      balanced_settings_(field_trials),
      degradation_preference_provider_(degradation_preference_provider) {
  RTC_DCHECK(degradation_preference_provider_);
  // Detach so the checker binds to the first sequence that actually uses this
  // object rather than the construction sequence.
  sequence_checker_.Detach();
}
|
||||
|
||||
// Caches the encoder's current target bitrate; consumed later by
// IsAdaptationUpAllowed(). Must run on `sequence_checker_`.
void BalancedConstraint::OnEncoderTargetBitrateUpdated(
    absl::optional<uint32_t> encoder_target_bitrate_bps) {
  RTC_DCHECK_RUN_ON(&sequence_checker_);
  // optional<uint32_t> is trivially copyable; plain assignment suffices.
  encoder_target_bitrate_bps_ = encoder_target_bitrate_bps;
}
|
||||
|
||||
bool BalancedConstraint::IsAdaptationUpAllowed(
|
||||
const VideoStreamInputState& input_state,
|
||||
const VideoSourceRestrictions& restrictions_before,
|
||||
const VideoSourceRestrictions& restrictions_after) const {
|
||||
RTC_DCHECK_RUN_ON(&sequence_checker_);
|
||||
// Don't adapt if BalancedDegradationSettings applies and determines this will
|
||||
// exceed bitrate constraints.
|
||||
if (degradation_preference_provider_->degradation_preference() ==
|
||||
DegradationPreference::BALANCED) {
|
||||
int frame_size_pixels = input_state.single_active_stream_pixels().value_or(
|
||||
input_state.frame_size_pixels().value());
|
||||
if (!balanced_settings_.CanAdaptUp(
|
||||
input_state.video_codec_type(), frame_size_pixels,
|
||||
encoder_target_bitrate_bps_.value_or(0))) {
|
||||
return false;
|
||||
}
|
||||
if (DidIncreaseResolution(restrictions_before, restrictions_after) &&
|
||||
!balanced_settings_.CanAdaptUpResolution(
|
||||
input_state.video_codec_type(), frame_size_pixels,
|
||||
encoder_target_bitrate_bps_.value_or(0))) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_
|
||||
#define VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/field_trials_view.h"
|
||||
#include "api/sequence_checker.h"
|
||||
#include "call/adaptation/adaptation_constraint.h"
|
||||
#include "call/adaptation/degradation_preference_provider.h"
|
||||
#include "rtc_base/experiments/balanced_degradation_settings.h"
|
||||
#include "rtc_base/system/no_unique_address.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// AdaptationConstraint that, under the BALANCED degradation preference,
// disallows adapting up when BalancedDegradationSettings indicates the current
// encoder target bitrate is too low for the higher quality.
class BalancedConstraint : public AdaptationConstraint {
 public:
  // `degradation_preference_provider` must be non-null (DCHECKed in the
  // constructor) and is borrowed, not owned.
  BalancedConstraint(
      DegradationPreferenceProvider* degradation_preference_provider,
      const FieldTrialsView& field_trials);
  ~BalancedConstraint() override = default;

  // Caches the encoder target bitrate used by IsAdaptationUpAllowed().
  void OnEncoderTargetBitrateUpdated(
      absl::optional<uint32_t> encoder_target_bitrate_bps);

  // AdaptationConstraint implementation.
  std::string Name() const override { return "BalancedConstraint"; }
  bool IsAdaptationUpAllowed(
      const VideoStreamInputState& input_state,
      const VideoSourceRestrictions& restrictions_before,
      const VideoSourceRestrictions& restrictions_after) const override;

 private:
  RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
  // absl::nullopt until the first OnEncoderTargetBitrateUpdated() call.
  absl::optional<uint32_t> encoder_target_bitrate_bps_
      RTC_GUARDED_BY(&sequence_checker_);
  const BalancedDegradationSettings balanced_settings_;
  // Not owned; DCHECKed non-null at construction.
  const DegradationPreferenceProvider* degradation_preference_provider_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_BALANCED_CONSTRAINT_H_
|
||||
|
|
@ -0,0 +1,83 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/bandwidth_quality_scaler_resource.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/experiments/balanced_degradation_settings.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// static
// Factory returning a ref-counted instance.
rtc::scoped_refptr<BandwidthQualityScalerResource>
BandwidthQualityScalerResource::Create() {
  return rtc::make_ref_counted<BandwidthQualityScalerResource>();
}

// The scaler itself is created lazily in StartCheckForOveruse().
BandwidthQualityScalerResource::BandwidthQualityScalerResource()
    : VideoStreamEncoderResource("BandwidthQualityScalerResource"),
      bandwidth_quality_scaler_(nullptr) {}

BandwidthQualityScalerResource::~BandwidthQualityScalerResource() {
  // StopCheckForOveruse() must have been called before destruction.
  RTC_DCHECK(!bandwidth_quality_scaler_);
}
|
||||
|
||||
bool BandwidthQualityScalerResource::is_started() const {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue());
|
||||
return bandwidth_quality_scaler_.get();
|
||||
}
|
||||
|
||||
// Creates the scaler (with this object as its usage handler) and hands it the
// per-resolution bitrate limits. Must not already be started.
void BandwidthQualityScalerResource::StartCheckForOveruse(
    const std::vector<VideoEncoder::ResolutionBitrateLimits>&
        resolution_bitrate_limits) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  RTC_DCHECK(!is_started());
  bandwidth_quality_scaler_ = std::make_unique<BandwidthQualityScaler>(this);

  // If the configuration parameters more than one, we should define and
  // declare the function BandwidthQualityScaler::Initialize() and call it.
  bandwidth_quality_scaler_->SetResolutionBitrateLimits(
      resolution_bitrate_limits);
}
|
||||
|
||||
// Tears down the scaler. Must be started when called.
void BandwidthQualityScalerResource::StopCheckForOveruse() {
  RTC_DCHECK_RUN_ON(encoder_queue());
  RTC_DCHECK(is_started());
  // Destroying the scaler also ensures there are no pending callbacks, which
  // makes it safe to destroy task queues with tasks still in flight.
  bandwidth_quality_scaler_ = nullptr;
}
|
||||
|
||||
// Scaler callback: bandwidth usage is high -> report overuse so the
// adaptation machinery can adapt down.
void BandwidthQualityScalerResource::OnReportUsageBandwidthHigh() {
  OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
}

// Scaler callback: bandwidth usage is low -> report underuse so the
// adaptation machinery may adapt up.
void BandwidthQualityScalerResource::OnReportUsageBandwidthLow() {
  OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
}
|
||||
|
||||
void BandwidthQualityScalerResource::OnEncodeCompleted(
|
||||
const EncodedImage& encoded_image,
|
||||
int64_t time_sent_in_us,
|
||||
int64_t encoded_image_size_bytes) {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue());
|
||||
|
||||
if (bandwidth_quality_scaler_) {
|
||||
bandwidth_quality_scaler_->ReportEncodeInfo(
|
||||
encoded_image_size_bytes, time_sent_in_us / 1000,
|
||||
encoded_image._encodedWidth, encoded_image._encodedHeight);
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_BANDWIDTH_QUALITY_SCALER_RESOURCE_H_
|
||||
#define VIDEO_ADAPTATION_BANDWIDTH_QUALITY_SCALER_RESOURCE_H_
|
||||
|
||||
#include <memory>
|
||||
#include <queue>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_adaptation_reason.h"
|
||||
#include "api/video_codecs/video_encoder.h"
|
||||
#include "call/adaptation/degradation_preference_provider.h"
|
||||
#include "call/adaptation/resource_adaptation_processor_interface.h"
|
||||
#include "modules/video_coding/utility/bandwidth_quality_scaler.h"
|
||||
#include "video/adaptation/video_stream_encoder_resource.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Handles interaction with the BandwidthQualityScaler.
class BandwidthQualityScalerResource
    : public VideoStreamEncoderResource,
      public BandwidthQualityScalerUsageHandlerInterface {
 public:
  // Factory returning a ref-counted instance.
  static rtc::scoped_refptr<BandwidthQualityScalerResource> Create();

  BandwidthQualityScalerResource();
  ~BandwidthQualityScalerResource() override;

  // True between StartCheckForOveruse() and StopCheckForOveruse().
  bool is_started() const;

  // Reports a finished encode to the scaler (no-op when not started).
  void OnEncodeCompleted(const EncodedImage& encoded_image,
                         int64_t time_sent_in_us,
                         int64_t encoded_image_size_bytes);

  // Creates the scaler and configures it with the given per-resolution
  // bitrate limits. Must not already be started.
  void StartCheckForOveruse(
      const std::vector<VideoEncoder::ResolutionBitrateLimits>&
          resolution_bitrate_limits);
  // Destroys the scaler; must be called before destruction.
  void StopCheckForOveruse();

  // BandwidthQualityScalerUsageHandlerInterface implementation.
  void OnReportUsageBandwidthHigh() override;
  void OnReportUsageBandwidthLow() override;

 private:
  // Non-null only while started; accessed on the encoder queue.
  std::unique_ptr<BandwidthQualityScaler> bandwidth_quality_scaler_
      RTC_GUARDED_BY(encoder_queue());
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_BANDWIDTH_QUALITY_SCALER_RESOURCE_H_
|
||||
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/bitrate_constraint.h"
|
||||
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "api/sequence_checker.h"
|
||||
#include "call/adaptation/video_stream_adapter.h"
|
||||
#include "video/adaptation/video_stream_encoder_resource_manager.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Both cached inputs start empty; they are filled in via the corresponding
// OnEncoder*Updated() calls.
BitrateConstraint::BitrateConstraint()
    : encoder_settings_(absl::nullopt),
      encoder_target_bitrate_bps_(absl::nullopt) {
  // Detach so the checker binds to the first sequence that actually uses this
  // object rather than the construction sequence.
  sequence_checker_.Detach();
}
|
||||
|
||||
void BitrateConstraint::OnEncoderSettingsUpdated(
|
||||
absl::optional<EncoderSettings> encoder_settings) {
|
||||
RTC_DCHECK_RUN_ON(&sequence_checker_);
|
||||
encoder_settings_ = std::move(encoder_settings);
|
||||
}
|
||||
|
||||
void BitrateConstraint::OnEncoderTargetBitrateUpdated(
|
||||
absl::optional<uint32_t> encoder_target_bitrate_bps) {
|
||||
RTC_DCHECK_RUN_ON(&sequence_checker_);
|
||||
encoder_target_bitrate_bps_ = std::move(encoder_target_bitrate_bps);
|
||||
}
|
||||
|
||||
// Checks if resolution is allowed to adapt up based on the current bitrate and
|
||||
// ResolutionBitrateLimits.min_start_bitrate_bps for the next higher resolution.
|
||||
// Bitrate limits usage is restricted to a single active stream/layer (e.g. when
|
||||
// quality scaling is enabled).
|
||||
bool BitrateConstraint::IsAdaptationUpAllowed(
|
||||
const VideoStreamInputState& input_state,
|
||||
const VideoSourceRestrictions& restrictions_before,
|
||||
const VideoSourceRestrictions& restrictions_after) const {
|
||||
RTC_DCHECK_RUN_ON(&sequence_checker_);
|
||||
// Make sure bitrate limits are not violated.
|
||||
if (DidIncreaseResolution(restrictions_before, restrictions_after)) {
|
||||
if (!encoder_settings_.has_value()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
uint32_t bitrate_bps = encoder_target_bitrate_bps_.value_or(0);
|
||||
if (bitrate_bps == 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (VideoStreamEncoderResourceManager::IsSimulcastOrMultipleSpatialLayers(
|
||||
encoder_settings_->encoder_config(),
|
||||
encoder_settings_->video_codec())) {
|
||||
// Resolution bitrate limits usage is restricted to singlecast.
|
||||
return true;
|
||||
}
|
||||
|
||||
absl::optional<int> current_frame_size_px =
|
||||
input_state.single_active_stream_pixels();
|
||||
if (!current_frame_size_px.has_value()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
absl::optional<VideoEncoder::ResolutionBitrateLimits> bitrate_limits =
|
||||
encoder_settings_->encoder_info().GetEncoderBitrateLimitsForResolution(
|
||||
// Need some sort of expected resulting pixels to be used
|
||||
// instead of unrestricted.
|
||||
GetHigherResolutionThan(*current_frame_size_px));
|
||||
|
||||
if (bitrate_limits.has_value()) {
|
||||
RTC_DCHECK_GE(bitrate_limits->frame_size_pixels, *current_frame_size_px);
|
||||
return bitrate_bps >=
|
||||
static_cast<uint32_t>(bitrate_limits->min_start_bitrate_bps);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_
|
||||
#define VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/sequence_checker.h"
|
||||
#include "call/adaptation/adaptation_constraint.h"
|
||||
#include "call/adaptation/encoder_settings.h"
|
||||
#include "call/adaptation/video_source_restrictions.h"
|
||||
#include "call/adaptation/video_stream_input_state.h"
|
||||
#include "rtc_base/system/no_unique_address.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// AdaptationConstraint that disallows adapting resolution up when the current
// encoder target bitrate is below the min-start-bitrate limit of the next
// higher resolution.
class BitrateConstraint : public AdaptationConstraint {
 public:
  BitrateConstraint();
  ~BitrateConstraint() override = default;

  // Cache the latest encoder settings / target bitrate; both are consumed by
  // IsAdaptationUpAllowed().
  void OnEncoderSettingsUpdated(
      absl::optional<EncoderSettings> encoder_settings);
  void OnEncoderTargetBitrateUpdated(
      absl::optional<uint32_t> encoder_target_bitrate_bps);

  // AdaptationConstraint implementation.
  std::string Name() const override { return "BitrateConstraint"; }
  bool IsAdaptationUpAllowed(
      const VideoStreamInputState& input_state,
      const VideoSourceRestrictions& restrictions_before,
      const VideoSourceRestrictions& restrictions_after) const override;

 private:
  RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
  // absl::nullopt until the first corresponding OnEncoder*Updated() call.
  absl::optional<EncoderSettings> encoder_settings_
      RTC_GUARDED_BY(&sequence_checker_);
  absl::optional<uint32_t> encoder_target_bitrate_bps_
      RTC_GUARDED_BY(&sequence_checker_);
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_BITRATE_CONSTRAINT_H_
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/encode_usage_resource.h"
|
||||
|
||||
#include <limits>
|
||||
#include <utility>
|
||||
|
||||
#include "rtc_base/checks.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// static
// Factory returning a ref-counted instance wrapping `overuse_detector`.
rtc::scoped_refptr<EncodeUsageResource> EncodeUsageResource::Create(
    std::unique_ptr<OveruseFrameDetector> overuse_detector) {
  return rtc::make_ref_counted<EncodeUsageResource>(
      std::move(overuse_detector));
}

// Takes ownership of `overuse_detector`, which must be non-null (DCHECKed).
// Detection is inactive until StartCheckForOveruse().
EncodeUsageResource::EncodeUsageResource(
    std::unique_ptr<OveruseFrameDetector> overuse_detector)
    : VideoStreamEncoderResource("EncoderUsageResource"),
      overuse_detector_(std::move(overuse_detector)),
      is_started_(false),
      target_frame_rate_(absl::nullopt) {
  RTC_DCHECK(overuse_detector_);
}

EncodeUsageResource::~EncodeUsageResource() {}
|
||||
|
||||
// True between StartCheckForOveruse() and StopCheckForOveruse().
bool EncodeUsageResource::is_started() const {
  RTC_DCHECK_RUN_ON(encoder_queue());
  return is_started_;
}
|
||||
|
||||
// Starts overuse detection on the current task queue with this object as the
// observer. Must not already be started.
void EncodeUsageResource::StartCheckForOveruse(CpuOveruseOptions options) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  RTC_DCHECK(!is_started_);
  overuse_detector_->StartCheckForOveruse(TaskQueueBase::Current(),
                                          std::move(options), this);
  is_started_ = true;
  // Push the currently cached target frame rate to the now-running detector.
  overuse_detector_->OnTargetFramerateUpdated(TargetFrameRateAsInt());
}
|
||||
|
||||
// Stops overuse detection; safe to call StartCheckForOveruse() again after.
void EncodeUsageResource::StopCheckForOveruse() {
  RTC_DCHECK_RUN_ON(encoder_queue());
  overuse_detector_->StopCheckForOveruse();
  is_started_ = false;
}
|
||||
|
||||
// Caches the target frame rate and, when detection is running, forwards the
// new value to the detector. No-op when the value is unchanged.
void EncodeUsageResource::SetTargetFrameRate(
    absl::optional<double> target_frame_rate) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  if (target_frame_rate_ == target_frame_rate) {
    return;
  }
  target_frame_rate_ = target_frame_rate;
  if (is_started_) {
    overuse_detector_->OnTargetFramerateUpdated(TargetFrameRateAsInt());
  }
}
|
||||
|
||||
// Notifies the detector that encoding of `cropped_frame` has begun.
void EncodeUsageResource::OnEncodeStarted(const VideoFrame& cropped_frame,
                                          int64_t time_when_first_seen_us) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  // TODO(hbos): Rename FrameCaptured() to something more appropriate (e.g.
  // "OnEncodeStarted"?) or revise usage.
  overuse_detector_->FrameCaptured(cropped_frame, time_when_first_seen_us);
}
|
||||
|
||||
// Notifies the detector that the frame with `timestamp` finished encoding,
// passing along timing information used for usage estimation.
void EncodeUsageResource::OnEncodeCompleted(
    uint32_t timestamp,
    int64_t time_sent_in_us,
    int64_t capture_time_us,
    absl::optional<int> encode_duration_us) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  // TODO(hbos): Rename FrameSent() to something more appropriate (e.g.
  // "OnEncodeCompleted"?).
  overuse_detector_->FrameSent(timestamp, time_sent_in_us, capture_time_us,
                               encode_duration_us);
}
|
||||
|
||||
// Detector callback: CPU is underused -> report underuse so the adaptation
// machinery may adapt up.
void EncodeUsageResource::AdaptUp() {
  RTC_DCHECK_RUN_ON(encoder_queue());
  OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
}

// Detector callback: CPU is overused -> report overuse so the adaptation
// machinery can adapt down.
void EncodeUsageResource::AdaptDown() {
  RTC_DCHECK_RUN_ON(encoder_queue());
  OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
}
|
||||
|
||||
int EncodeUsageResource::TargetFrameRateAsInt() {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue());
|
||||
return target_frame_rate_.has_value()
|
||||
? static_cast<int>(target_frame_rate_.value())
|
||||
: std::numeric_limits<int>::max();
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_ENCODE_USAGE_RESOURCE_H_
|
||||
#define VIDEO_ADAPTATION_ENCODE_USAGE_RESOURCE_H_
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_adaptation_reason.h"
|
||||
#include "video/adaptation/overuse_frame_detector.h"
|
||||
#include "video/adaptation/video_stream_encoder_resource.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Handles interaction with the OveruseDetector.
// TODO(hbos): Add unittests specific to this class, it is currently only tested
// indirectly by usage in the ResourceAdaptationProcessor (which is only tested
// because of its usage in VideoStreamEncoder); all tests are currently in
// video_stream_encoder_unittest.cc.
class EncodeUsageResource : public VideoStreamEncoderResource,
                            public OveruseFrameDetectorObserverInterface {
 public:
  // Factory returning a ref-counted instance wrapping `overuse_detector`.
  static rtc::scoped_refptr<EncodeUsageResource> Create(
      std::unique_ptr<OveruseFrameDetector> overuse_detector);

  // Takes ownership of `overuse_detector` (must be non-null).
  explicit EncodeUsageResource(
      std::unique_ptr<OveruseFrameDetector> overuse_detector);
  ~EncodeUsageResource() override;

  // True between StartCheckForOveruse() and StopCheckForOveruse().
  bool is_started() const;

  void StartCheckForOveruse(CpuOveruseOptions options);
  void StopCheckForOveruse();

  // Caches the target frame rate and forwards it to the detector if started.
  void SetTargetFrameRate(absl::optional<double> target_frame_rate);
  // Forward per-frame encode timing to the detector.
  void OnEncodeStarted(const VideoFrame& cropped_frame,
                       int64_t time_when_first_seen_us);
  void OnEncodeCompleted(uint32_t timestamp,
                         int64_t time_sent_in_us,
                         int64_t capture_time_us,
                         absl::optional<int> encode_duration_us);

  // OveruseFrameDetectorObserverInterface implementation.
  void AdaptUp() override;
  void AdaptDown() override;

 private:
  // Cached target frame rate as int; INT_MAX when unset.
  int TargetFrameRateAsInt();

  const std::unique_ptr<OveruseFrameDetector> overuse_detector_
      RTC_GUARDED_BY(encoder_queue());
  bool is_started_ RTC_GUARDED_BY(encoder_queue());
  absl::optional<double> target_frame_rate_ RTC_GUARDED_BY(encoder_queue());
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_ENCODE_USAGE_RESOURCE_H_
|
||||
|
|
@ -0,0 +1,669 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/overuse_frame_detector.h"
|
||||
|
||||
#include <math.h>
|
||||
#include <stdio.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <utility>
|
||||
|
||||
#include "api/video/video_frame.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/numerics/exp_filter.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
#include "rtc_base/trace_event.h"
|
||||
#include "system_wrappers/include/field_trial.h"
|
||||
|
||||
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
|
||||
#include <mach/mach.h>
|
||||
#endif // defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
// How often the detector checks for overuse, and the (shorter) delay before
// the very first check.
const int64_t kCheckForOveruseIntervalMs = 5000;
const int64_t kTimeToFirstCheckForOveruseMs = 100;

// Delay between consecutive rampups. (Used for quick recovery.)
const int kQuickRampUpDelayMs = 10 * 1000;
// Delay between rampup attempts. Initially uses standard, scales up to max.
const int kStandardRampUpDelayMs = 40 * 1000;
const int kMaxRampUpDelayMs = 240 * 1000;
// Exponential back-off factor, to prevent annoying up-down behaviour.
const double kRampUpBackoffFactor = 2.0;

// Max number of overuses detected before always applying the rampup delay.
const int kMaxOverusesBeforeApplyRampupDelay = 4;

// The maximum exponent to use in VCMExpFilter.
const float kMaxExp = 7.0f;
// Default value used before first reconfiguration.
const int kDefaultFrameRate = 30;
// Default sample diff, default frame rate.
const float kDefaultSampleDiffMs = 1000.0f / kDefaultFrameRate;
// A factor applied to the sample diff on OnTargetFramerateUpdated to determine
// a max limit for the sample diff. For instance, with a framerate of 30fps,
// the sample diff is capped to (1000 / 30) * 1.35 = 45ms. This prevents
// triggering too soon if there are individual very large outliers.
const float kMaxSampleDiffMarginFactor = 1.35f;
// Minimum framerate allowed for usage calculation. This prevents crazy long
// encode times from being accepted if the frame rate happens to be low.
const int kMinFramerate = 7;
const int kMaxFramerate = 30;
|
||||
|
||||
// Class for calculating the processing usage on the send-side (the average
// processing time of a frame divided by the average time difference between
// captured frames).
class SendProcessingUsage1 : public OveruseFrameDetector::ProcessingUsage {
 public:
  explicit SendProcessingUsage1(const CpuOveruseOptions& options)
      : kWeightFactorFrameDiff(0.998f),
        kWeightFactorProcessing(0.995f),
        kInitialSampleDiffMs(40.0f),
        options_(options),
        count_(0),
        last_processed_capture_time_us_(-1),
        max_sample_diff_ms_(kDefaultSampleDiffMs * kMaxSampleDiffMarginFactor),
        filtered_processing_ms_(new rtc::ExpFilter(kWeightFactorProcessing)),
        filtered_frame_diff_ms_(new rtc::ExpFilter(kWeightFactorFrameDiff)) {
    Reset();
  }
  ~SendProcessingUsage1() override {}

  // Restores all state (pending frame timings, counters, filters) to the
  // post-construction defaults.
  void Reset() override {
    frame_timing_.clear();
    count_ = 0;
    last_processed_capture_time_us_ = -1;
    max_sample_diff_ms_ = kDefaultSampleDiffMs * kMaxSampleDiffMarginFactor;
    filtered_frame_diff_ms_->Reset(kWeightFactorFrameDiff);
    filtered_frame_diff_ms_->Apply(1.0f, kInitialSampleDiffMs);
    filtered_processing_ms_->Reset(kWeightFactorProcessing);
    filtered_processing_ms_->Apply(1.0f, InitialProcessingMs());
  }

  // Caps the filtered frame-diff used in Value(); see
  // kMaxSampleDiffMarginFactor for the rationale.
  void SetMaxSampleDiffMs(float diff_ms) override {
    max_sample_diff_ms_ = diff_ms;
  }

  // Records a newly captured frame: updates the inter-frame-time filter and
  // queues a FrameTiming entry to be matched in FrameSent().
  void FrameCaptured(const VideoFrame& frame,
                     int64_t time_when_first_seen_us,
                     int64_t last_capture_time_us) override {
    if (last_capture_time_us != -1)
      AddCaptureSample(1e-3 * (time_when_first_seen_us - last_capture_time_us));

    frame_timing_.push_back(FrameTiming(frame.timestamp_us(), frame.timestamp(),
                                        time_when_first_seen_us));
  }

  // Matches a sent frame (by RTP timestamp) against the queued captures and
  // folds the measured encode durations into the processing-time filter.
  // Returns the last measured encode duration, if any frame aged out of the
  // measurement window on this call.
  absl::optional<int> FrameSent(
      uint32_t timestamp,
      int64_t time_sent_in_us,
      int64_t /* capture_time_us */,
      absl::optional<int> /* encode_duration_us */) override {
    absl::optional<int> encode_duration_us;
    // Delay before reporting actual encoding time, used to have the ability to
    // detect total encoding time when encoding more than one layer. Encoding is
    // here assumed to finish within a second (or that we get enough long-time
    // samples before one second to trigger an overuse even when this is not the
    // case).
    static const int64_t kEncodingTimeMeasureWindowMs = 1000;
    // Mark the matching queued frame with its (latest) send time.
    for (auto& it : frame_timing_) {
      if (it.timestamp == timestamp) {
        it.last_send_us = time_sent_in_us;
        break;
      }
    }
    // TODO(pbos): Handle the case/log errors when not finding the corresponding
    // frame (either very slow encoding or incorrect wrong timestamps returned
    // from the encoder).
    // This is currently the case for all frames on ChromeOS, so logging them
    // would be spammy, and triggering overuse would be wrong.
    // https://crbug.com/350106
    // Process (and drop) every queued frame older than the measurement window.
    while (!frame_timing_.empty()) {
      FrameTiming timing = frame_timing_.front();
      if (time_sent_in_us - timing.capture_us <
          kEncodingTimeMeasureWindowMs * rtc::kNumMicrosecsPerMillisec) {
        break;
      }
      if (timing.last_send_us != -1) {
        encode_duration_us.emplace(
            static_cast<int>(timing.last_send_us - timing.capture_us));

        if (last_processed_capture_time_us_ != -1) {
          int64_t diff_us = timing.capture_us - last_processed_capture_time_us_;
          AddSample(1e-3 * (*encode_duration_us), 1e-3 * diff_us);
        }
        last_processed_capture_time_us_ = timing.capture_us;
      }
      frame_timing_.pop_front();
    }
    return encode_duration_us;
  }

  // Returns the usage estimate in percent: filtered processing time divided by
  // the (capped) filtered inter-frame time. Before `min_frame_samples` samples
  // have been collected, returns the initial midpoint estimate.
  int Value() override {
    if (count_ < static_cast<uint32_t>(options_.min_frame_samples)) {
      return static_cast<int>(InitialUsageInPercent() + 0.5f);
    }
    float frame_diff_ms = std::max(filtered_frame_diff_ms_->filtered(), 1.0f);
    frame_diff_ms = std::min(frame_diff_ms, max_sample_diff_ms_);
    float encode_usage_percent =
        100.0f * filtered_processing_ms_->filtered() / frame_diff_ms;
    return static_cast<int>(encode_usage_percent + 0.5);
  }

 private:
  // Per-frame bookkeeping; `last_send_us` stays -1 until the frame is sent.
  struct FrameTiming {
    FrameTiming(int64_t capture_time_us, uint32_t timestamp, int64_t now)
        : capture_time_us(capture_time_us),
          timestamp(timestamp),
          capture_us(now),
          last_send_us(-1) {}
    int64_t capture_time_us;
    uint32_t timestamp;
    int64_t capture_us;
    int64_t last_send_us;
  };

  // Feeds one inter-capture interval (ms) into the frame-diff filter.
  void AddCaptureSample(float sample_ms) {
    float exp = sample_ms / kDefaultSampleDiffMs;
    exp = std::min(exp, kMaxExp);
    filtered_frame_diff_ms_->Apply(exp, sample_ms);
  }

  // Feeds one processing-time sample (ms) into the processing filter,
  // weighted by the time since the previous sample.
  void AddSample(float processing_ms, int64_t diff_last_sample_ms) {
    ++count_;
    float exp = diff_last_sample_ms / kDefaultSampleDiffMs;
    exp = std::min(exp, kMaxExp);
    filtered_processing_ms_->Apply(exp, processing_ms);
  }

  float InitialUsageInPercent() const {
    // Start in between the underuse and overuse threshold.
    return (options_.low_encode_usage_threshold_percent +
            options_.high_encode_usage_threshold_percent) /
           2.0f;
  }

  // Processing time (ms) consistent with the initial usage estimate.
  float InitialProcessingMs() const {
    return InitialUsageInPercent() * kInitialSampleDiffMs / 100;
  }

  const float kWeightFactorFrameDiff;
  const float kWeightFactorProcessing;
  const float kInitialSampleDiffMs;

  const CpuOveruseOptions options_;
  // Frames captured but not yet aged out of the measurement window.
  std::list<FrameTiming> frame_timing_;
  // Number of samples fed to the processing filter.
  uint64_t count_;
  int64_t last_processed_capture_time_us_;
  float max_sample_diff_ms_;
  std::unique_ptr<rtc::ExpFilter> filtered_processing_ms_;
  std::unique_ptr<rtc::ExpFilter> filtered_frame_diff_ms_;
};
|
||||
|
||||
// New cpu load estimator.
|
||||
// TODO(bugs.webrtc.org/8504): For some period of time, we need to
|
||||
// switch between the two versions of the estimator for experiments.
|
||||
// When problems are sorted out, the old estimator should be deleted.
|
||||
class SendProcessingUsage2 : public OveruseFrameDetector::ProcessingUsage {
 public:
  explicit SendProcessingUsage2(const CpuOveruseOptions& options)
      : options_(options) {
    Reset();
  }
  ~SendProcessingUsage2() override = default;

  // Resets the estimator to its initial state: no previous sample, and a load
  // estimate at the midpoint of the underuse/overuse thresholds.
  void Reset() override {
    prev_time_us_ = -1;
    // Start in between the underuse and overuse threshold.
    load_estimate_ = (options_.low_encode_usage_threshold_percent +
                      options_.high_encode_usage_threshold_percent) /
                     200.0;
  }

  // Unused by this estimator; load is time-weighted instead of clamped.
  void SetMaxSampleDiffMs(float /* diff_ms */) override {}

  // Unused; this estimator only needs send-side timing (see FrameSent).
  void FrameCaptured(const VideoFrame& frame,
                     int64_t time_when_first_seen_us,
                     int64_t last_capture_time_us) override {}

  // Feeds one encode-duration measurement, keyed by capture time, into the
  // load filter. Returns the encode duration unchanged for the caller.
  absl::optional<int> FrameSent(
      uint32_t /* timestamp */,
      int64_t /* time_sent_in_us */,
      int64_t capture_time_us,
      absl::optional<int> encode_duration_us) override {
    if (encode_duration_us) {
      int duration_per_frame_us =
          DurationPerInputFrame(capture_time_us, *encode_duration_us);
      if (prev_time_us_ != -1) {
        if (capture_time_us < prev_time_us_) {
          // The weighting in AddSample assumes that samples are processed with
          // non-decreasing measurement timestamps. We could implement
          // appropriate weights for samples arriving late, but since it is a
          // rare case, keep things simple, by just pushing those measurements a
          // bit forward in time.
          capture_time_us = prev_time_us_;
        }
        // Convert us -> s before filtering.
        AddSample(1e-6 * duration_per_frame_us,
                  1e-6 * (capture_time_us - prev_time_us_));
      }
    }
    prev_time_us_ = capture_time_us;

    return encode_duration_us;
  }

 private:
  // Exponential moving average with a time-dependent weight; `encode_time`
  // and `diff_time` are in seconds.
  void AddSample(double encode_time, double diff_time) {
    RTC_CHECK_GE(diff_time, 0.0);

    // Use the filter update
    //
    // load <-- x/d (1-exp (-d/T)) + exp (-d/T) load
    //
    // where we must take care for small d, using the proper limit
    // (1 - exp(-d/tau)) / d = 1/tau - d/2tau^2 + O(d^2)
    double tau = (1e-3 * options_.filter_time_ms);
    double e = diff_time / tau;
    double c;
    if (e < 0.0001) {
      // Series expansion avoids catastrophic cancellation for tiny e.
      c = (1 - e / 2) / tau;
    } else {
      // expm1 computes exp(-e) - 1 accurately for small arguments.
      c = -expm1(-e) / diff_time;
    }
    load_estimate_ = c * encode_time + exp(-e) * load_estimate_;
  }

  // Returns the *increase* in total encode time attributable to this encoded
  // frame. Multiple encodings of the same input frame (same capture time) are
  // treated as running in parallel: only growth beyond the recorded maximum
  // counts.
  int64_t DurationPerInputFrame(int64_t capture_time_us,
                                int64_t encode_time_us) {
    // Discard data on old frames; limit 2 seconds.
    static constexpr int64_t kMaxAge = 2 * rtc::kNumMicrosecsPerSec;
    for (auto it = max_encode_time_per_input_frame_.begin();
         it != max_encode_time_per_input_frame_.end() &&
         it->first < capture_time_us - kMaxAge;) {
      it = max_encode_time_per_input_frame_.erase(it);
    }

    std::map<int64_t, int>::iterator it;
    bool inserted;
    std::tie(it, inserted) = max_encode_time_per_input_frame_.emplace(
        capture_time_us, encode_time_us);
    if (inserted) {
      // First encoded frame for this input frame.
      return encode_time_us;
    }
    if (encode_time_us <= it->second) {
      // Shorter encode time than previous frame (unlikely). Count it as being
      // done in parallel.
      return 0;
    }
    // Record new maximum encode time, and return increase from previous max.
    int increase = encode_time_us - it->second;
    it->second = encode_time_us;
    return increase;
  }

  // Load as a rounded percentage (load_estimate_ is a 0..1-ish fraction).
  int Value() override {
    return static_cast<int>(100.0 * load_estimate_ + 0.5);
  }

  const CpuOveruseOptions options_;
  // Indexed by the capture timestamp, used as frame id.
  std::map<int64_t, int> max_encode_time_per_input_frame_;

  // Capture time of the most recent sample; -1 until the first one.
  int64_t prev_time_us_ = -1;
  double load_estimate_;
};
|
||||
|
||||
// Class used for manual testing of overuse, enabled via field trial flag.
|
||||
class OverdoseInjector : public OveruseFrameDetector::ProcessingUsage {
|
||||
public:
|
||||
OverdoseInjector(std::unique_ptr<OveruseFrameDetector::ProcessingUsage> usage,
|
||||
int64_t normal_period_ms,
|
||||
int64_t overuse_period_ms,
|
||||
int64_t underuse_period_ms)
|
||||
: usage_(std::move(usage)),
|
||||
normal_period_ms_(normal_period_ms),
|
||||
overuse_period_ms_(overuse_period_ms),
|
||||
underuse_period_ms_(underuse_period_ms),
|
||||
state_(State::kNormal),
|
||||
last_toggling_ms_(-1) {
|
||||
RTC_DCHECK_GT(overuse_period_ms, 0);
|
||||
RTC_DCHECK_GT(normal_period_ms, 0);
|
||||
RTC_LOG(LS_INFO) << "Simulating overuse with intervals " << normal_period_ms
|
||||
<< "ms normal mode, " << overuse_period_ms
|
||||
<< "ms overuse mode.";
|
||||
}
|
||||
|
||||
~OverdoseInjector() override {}
|
||||
|
||||
void Reset() override { usage_->Reset(); }
|
||||
|
||||
void SetMaxSampleDiffMs(float diff_ms) override {
|
||||
usage_->SetMaxSampleDiffMs(diff_ms);
|
||||
}
|
||||
|
||||
void FrameCaptured(const VideoFrame& frame,
|
||||
int64_t time_when_first_seen_us,
|
||||
int64_t last_capture_time_us) override {
|
||||
usage_->FrameCaptured(frame, time_when_first_seen_us, last_capture_time_us);
|
||||
}
|
||||
|
||||
absl::optional<int> FrameSent(
|
||||
// These two argument used by old estimator.
|
||||
uint32_t timestamp,
|
||||
int64_t time_sent_in_us,
|
||||
// And these two by the new estimator.
|
||||
int64_t capture_time_us,
|
||||
absl::optional<int> encode_duration_us) override {
|
||||
return usage_->FrameSent(timestamp, time_sent_in_us, capture_time_us,
|
||||
encode_duration_us);
|
||||
}
|
||||
|
||||
int Value() override {
|
||||
int64_t now_ms = rtc::TimeMillis();
|
||||
if (last_toggling_ms_ == -1) {
|
||||
last_toggling_ms_ = now_ms;
|
||||
} else {
|
||||
switch (state_) {
|
||||
case State::kNormal:
|
||||
if (now_ms > last_toggling_ms_ + normal_period_ms_) {
|
||||
state_ = State::kOveruse;
|
||||
last_toggling_ms_ = now_ms;
|
||||
RTC_LOG(LS_INFO) << "Simulating CPU overuse.";
|
||||
}
|
||||
break;
|
||||
case State::kOveruse:
|
||||
if (now_ms > last_toggling_ms_ + overuse_period_ms_) {
|
||||
state_ = State::kUnderuse;
|
||||
last_toggling_ms_ = now_ms;
|
||||
RTC_LOG(LS_INFO) << "Simulating CPU underuse.";
|
||||
}
|
||||
break;
|
||||
case State::kUnderuse:
|
||||
if (now_ms > last_toggling_ms_ + underuse_period_ms_) {
|
||||
state_ = State::kNormal;
|
||||
last_toggling_ms_ = now_ms;
|
||||
RTC_LOG(LS_INFO) << "Actual CPU overuse measurements in effect.";
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
absl::optional<int> overried_usage_value;
|
||||
switch (state_) {
|
||||
case State::kNormal:
|
||||
break;
|
||||
case State::kOveruse:
|
||||
overried_usage_value.emplace(250);
|
||||
break;
|
||||
case State::kUnderuse:
|
||||
overried_usage_value.emplace(5);
|
||||
break;
|
||||
}
|
||||
|
||||
return overried_usage_value.value_or(usage_->Value());
|
||||
}
|
||||
|
||||
private:
|
||||
const std::unique_ptr<OveruseFrameDetector::ProcessingUsage> usage_;
|
||||
const int64_t normal_period_ms_;
|
||||
const int64_t overuse_period_ms_;
|
||||
const int64_t underuse_period_ms_;
|
||||
enum class State { kNormal, kOveruse, kUnderuse } state_;
|
||||
int64_t last_toggling_ms_;
|
||||
};
|
||||
|
||||
} // namespace
|
||||
|
||||
// Builds the load estimator: the new filter-based SendProcessingUsage2 when a
// filter time is configured, otherwise the legacy SendProcessingUsage1. If
// the WebRTC-ForceSimulatedOveruseIntervalMs field trial is set to a valid
// "normal-overuse-underuse" millisecond triple, the estimator is wrapped in
// an OverdoseInjector for manual overuse testing.
std::unique_ptr<OveruseFrameDetector::ProcessingUsage>
OveruseFrameDetector::CreateProcessingUsage(const CpuOveruseOptions& options) {
  std::unique_ptr<ProcessingUsage> instance;
  if (options.filter_time_ms > 0) {
    instance = std::make_unique<SendProcessingUsage2>(options);
  } else {
    instance = std::make_unique<SendProcessingUsage1>(options);
  }
  std::string toggling_interval =
      field_trial::FindFullName("WebRTC-ForceSimulatedOveruseIntervalMs");
  if (!toggling_interval.empty()) {
    int normal_period_ms = 0;
    int overuse_period_ms = 0;
    int underuse_period_ms = 0;
    // Expected format: "<normal>-<overuse>-<underuse>", all in ms.
    if (sscanf(toggling_interval.c_str(), "%d-%d-%d", &normal_period_ms,
               &overuse_period_ms, &underuse_period_ms) == 3) {
      if (normal_period_ms > 0 && overuse_period_ms > 0 &&
          underuse_period_ms > 0) {
        instance = std::make_unique<OverdoseInjector>(
            std::move(instance), normal_period_ms, overuse_period_ms,
            underuse_period_ms);
      } else {
        // Invalid config: log and fall through with the unwrapped estimator.
        RTC_LOG(LS_WARNING)
            << "Invalid (non-positive) normal/overuse/underuse periods: "
            << normal_period_ms << " / " << overuse_period_ms << " / "
            << underuse_period_ms;
      }
    } else {
      RTC_LOG(LS_WARNING) << "Malformed toggling interval: "
                          << toggling_interval;
    }
  }
  return instance;
}
|
||||
|
||||
OveruseFrameDetector::OveruseFrameDetector(
    CpuOveruseMetricsObserver* metrics_observer)
    : metrics_observer_(metrics_observer),
      num_process_times_(0),
      // TODO(bugs.webrtc.org/9078): Use absl::optional
      last_capture_time_us_(-1),
      num_pixels_(0),
      max_framerate_(kDefaultFrameRate),
      last_overuse_time_ms_(-1),
      checks_above_threshold_(0),
      num_overuse_detections_(0),
      last_rampup_time_ms_(-1),
      in_quick_rampup_(false),
      current_rampup_delay_ms_(kStandardRampUpDelayMs) {
  // The object may be constructed on a different sequence than the one all
  // other methods run on; re-attach on first use.
  task_checker_.Detach();
  // The "tau" parameter of the WebRTC-CpuLoadEstimator trial, if present,
  // later overrides options_.filter_time_ms (see SetOptions).
  ParseFieldTrial({&filter_time_constant_},
                  field_trial::FindFullName("WebRTC-CpuLoadEstimator"));
}
|
||||
|
||||
// Defaulted out of line; declared virtual in the class declaration.
OveruseFrameDetector::~OveruseFrameDetector() = default;
|
||||
|
||||
// Applies `options` and starts the periodic overuse check on
// `task_queue_base`. Must not be called while a check is already running.
void OveruseFrameDetector::StartCheckForOveruse(
    TaskQueueBase* task_queue_base,
    const CpuOveruseOptions& options,
    OveruseFrameDetectorObserverInterface* overuse_observer) {
  RTC_DCHECK_RUN_ON(&task_checker_);
  RTC_DCHECK(!check_overuse_task_.Running());
  RTC_DCHECK(overuse_observer != nullptr);

  SetOptions(options);
  // First check is delayed; afterwards the task re-schedules itself at a
  // fixed interval by returning the next delay.
  check_overuse_task_ = RepeatingTaskHandle::DelayedStart(
      task_queue_base, TimeDelta::Millis(kTimeToFirstCheckForOveruseMs),
      [this, overuse_observer] {
        CheckForOveruse(overuse_observer);
        return TimeDelta::Millis(kCheckForOveruseIntervalMs);
      });
}
|
||||
// Cancels the periodic overuse check started by StartCheckForOveruse.
void OveruseFrameDetector::StopCheckForOveruse() {
  RTC_DCHECK_RUN_ON(&task_checker_);
  check_overuse_task_.Stop();
}
|
||||
|
||||
// Refreshes the cached usage percentage from the estimator and forwards the
// measurement to the stats observer.
void OveruseFrameDetector::EncodedFrameTimeMeasured(int encode_duration_ms) {
  RTC_DCHECK_RUN_ON(&task_checker_);
  encode_usage_percent_ = usage_->Value();

  metrics_observer_->OnEncodedFrameTimeMeasured(encode_duration_ms,
                                                *encode_usage_percent_);
}
|
||||
|
||||
bool OveruseFrameDetector::FrameSizeChanged(int num_pixels) const {
|
||||
RTC_DCHECK_RUN_ON(&task_checker_);
|
||||
if (num_pixels != num_pixels_) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool OveruseFrameDetector::FrameTimeoutDetected(int64_t now_us) const {
|
||||
RTC_DCHECK_RUN_ON(&task_checker_);
|
||||
if (last_capture_time_us_ == -1)
|
||||
return false;
|
||||
return (now_us - last_capture_time_us_) >
|
||||
options_.frame_timeout_interval_ms * rtc::kNumMicrosecsPerMillisec;
|
||||
}
|
||||
|
||||
void OveruseFrameDetector::ResetAll(int num_pixels) {
  // Reset state, as a result resolution being changed. Do not however change
  // the current frame rate back to the default.
  RTC_DCHECK_RUN_ON(&task_checker_);
  num_pixels_ = num_pixels;
  usage_->Reset();
  last_capture_time_us_ = -1;
  num_process_times_ = 0;
  encode_usage_percent_ = absl::nullopt;
  // Re-derives the estimator's max sample diff from the kept framerate.
  OnTargetFramerateUpdated(max_framerate_);
}
|
||||
|
||||
// Records the capturer's target framerate (clamped to kMaxFramerate) and
// updates the estimator's maximum sample diff accordingly, so quiet capture
// periods do not unduly deflate the usage estimate.
void OveruseFrameDetector::OnTargetFramerateUpdated(int framerate_fps) {
  RTC_DCHECK_RUN_ON(&task_checker_);
  RTC_DCHECK_GE(framerate_fps, 0);
  max_framerate_ = std::min(kMaxFramerate, framerate_fps);
  const int effective_fps = std::max(kMinFramerate, max_framerate_);
  usage_->SetMaxSampleDiffMs((1000 / effective_fps) *
                             kMaxSampleDiffMarginFactor);
}
|
||||
|
||||
// Records a captured frame. A resolution change or a capture timeout resets
// all estimation state before the frame is fed to the estimator.
void OveruseFrameDetector::FrameCaptured(const VideoFrame& frame,
                                         int64_t time_when_first_seen_us) {
  RTC_DCHECK_RUN_ON(&task_checker_);

  if (FrameSizeChanged(frame.width() * frame.height()) ||
      FrameTimeoutDetected(time_when_first_seen_us)) {
    ResetAll(frame.width() * frame.height());
  }

  usage_->FrameCaptured(frame, time_when_first_seen_us, last_capture_time_us_);
  last_capture_time_us_ = time_when_first_seen_us;
}
|
||||
|
||||
// Records a sent frame. The estimator may return a (possibly adjusted)
// encode duration; when it does, the usage estimate is refreshed and
// reported.
void OveruseFrameDetector::FrameSent(uint32_t timestamp,
                                     int64_t time_sent_in_us,
                                     int64_t capture_time_us,
                                     absl::optional<int> encode_duration_us) {
  RTC_DCHECK_RUN_ON(&task_checker_);
  encode_duration_us = usage_->FrameSent(timestamp, time_sent_in_us,
                                         capture_time_us, encode_duration_us);

  if (encode_duration_us) {
    EncodedFrameTimeMeasured(*encode_duration_us /
                             rtc::kNumMicrosecsPerMillisec);
  }
}
|
||||
|
||||
// Periodic check: decides whether to signal AdaptDown (overuse) or AdaptUp
// (underuse) to the observer, with backoff logic that lengthens the ramp-up
// delay when ramp-ups are quickly followed by overuse. No-op until enough
// process iterations and a usage measurement exist.
void OveruseFrameDetector::CheckForOveruse(
    OveruseFrameDetectorObserverInterface* observer) {
  RTC_DCHECK_RUN_ON(&task_checker_);
  RTC_DCHECK(observer);
  ++num_process_times_;
  if (num_process_times_ <= options_.min_process_count ||
      !encode_usage_percent_)
    return;

  int64_t now_ms = rtc::TimeMillis();
  const char* action = "NoAction";

  if (IsOverusing(*encode_usage_percent_)) {
    // If the last thing we did was going up, and now have to back down, we need
    // to check if this peak was short. If so we should back off to avoid going
    // back and forth between this load, the system doesn't seem to handle it.
    bool check_for_backoff = last_rampup_time_ms_ > last_overuse_time_ms_;
    if (check_for_backoff) {
      if (now_ms - last_rampup_time_ms_ < kStandardRampUpDelayMs ||
          num_overuse_detections_ > kMaxOverusesBeforeApplyRampupDelay) {
        // Going up was not ok for very long, back off.
        current_rampup_delay_ms_ *= kRampUpBackoffFactor;
        if (current_rampup_delay_ms_ > kMaxRampUpDelayMs)
          current_rampup_delay_ms_ = kMaxRampUpDelayMs;
      } else {
        // Not currently backing off, reset rampup delay.
        current_rampup_delay_ms_ = kStandardRampUpDelayMs;
      }
    }

    last_overuse_time_ms_ = now_ms;
    in_quick_rampup_ = false;
    checks_above_threshold_ = 0;
    ++num_overuse_detections_;

    observer->AdaptDown();
    action = "AdaptDown";
  } else if (IsUnderusing(*encode_usage_percent_, now_ms)) {
    last_rampup_time_ms_ = now_ms;
    // The next ramp-up opportunity uses the short kQuickRampUpDelayMs.
    in_quick_rampup_ = true;

    observer->AdaptUp();
    action = "AdaptUp";
  }
  TRACE_EVENT2("webrtc", "OveruseFrameDetector::CheckForOveruse",
               "encode_usage_percent", *encode_usage_percent_, "action",
               TRACE_STR_COPY(action));

  int rampup_delay =
      in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;

  RTC_LOG(LS_INFO) << "CheckForOveruse: encode usage " << *encode_usage_percent_
                   << " overuse detections " << num_overuse_detections_
                   << " rampup delay " << rampup_delay << " action " << action;
}
|
||||
|
||||
// Stores new options (applying any field-trial filter-time override) and
// rebuilds the load estimator. The next captured frame triggers a full reset
// because num_pixels_ is cleared.
void OveruseFrameDetector::SetOptions(const CpuOveruseOptions& options) {
  RTC_DCHECK_RUN_ON(&task_checker_);
  options_ = options;

  // Time constant config overridable by field trial.
  if (filter_time_constant_) {
    options_.filter_time_ms = filter_time_constant_->ms();
  }
  // Force reset with next frame.
  num_pixels_ = 0;
  usage_ = CreateProcessingUsage(options);
}
|
||||
|
||||
bool OveruseFrameDetector::IsOverusing(int usage_percent) {
|
||||
RTC_DCHECK_RUN_ON(&task_checker_);
|
||||
|
||||
if (usage_percent >= options_.high_encode_usage_threshold_percent) {
|
||||
++checks_above_threshold_;
|
||||
} else {
|
||||
checks_above_threshold_ = 0;
|
||||
}
|
||||
return checks_above_threshold_ >= options_.high_threshold_consecutive_count;
|
||||
}
|
||||
|
||||
bool OveruseFrameDetector::IsUnderusing(int usage_percent, int64_t time_now) {
|
||||
RTC_DCHECK_RUN_ON(&task_checker_);
|
||||
int delay = in_quick_rampup_ ? kQuickRampUpDelayMs : current_rampup_delay_ms_;
|
||||
if (time_now < last_rampup_time_ms_ + delay)
|
||||
return false;
|
||||
|
||||
return usage_percent < options_.low_encode_usage_threshold_percent;
|
||||
}
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,179 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_OVERUSE_FRAME_DETECTOR_H_
|
||||
#define VIDEO_ADAPTATION_OVERUSE_FRAME_DETECTOR_H_
|
||||
|
||||
#include <list>
|
||||
#include <memory>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/field_trials_view.h"
|
||||
#include "api/sequence_checker.h"
|
||||
#include "api/task_queue/task_queue_base.h"
|
||||
#include "rtc_base/experiments/field_trial_parser.h"
|
||||
#include "rtc_base/numerics/exp_filter.h"
|
||||
#include "rtc_base/system/no_unique_address.h"
|
||||
#include "rtc_base/task_utils/repeating_task.h"
|
||||
#include "rtc_base/thread_annotations.h"
|
||||
#include "video/video_stream_encoder_observer.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class VideoFrame;
|
||||
|
||||
// Tuning knobs for OveruseFrameDetector. All thresholds are in percent of
// encode usage (encode time relative to frame interval).
struct CpuOveruseOptions {
  // Threshold for triggering overuse.
  int high_encode_usage_threshold_percent = 85;
  // Threshold for triggering underuse.
  // Note that we make the interval 2x+epsilon wide, since libyuv scaling steps
  // are close to that (when squared). This wide interval makes sure that
  // scaling up or down does not jump all the way across the interval.
  int low_encode_usage_threshold_percent =
      (high_encode_usage_threshold_percent - 1) / 2;
  // General settings.
  // The maximum allowed interval between two frames before resetting
  // estimations.
  int frame_timeout_interval_ms = 1500;
  // The minimum number of frames required.
  int min_frame_samples = 120;

  // The number of initial process times required before
  // triggering an overuse/underuse.
  int min_process_count = 3;
  // The number of consecutive checks above the high threshold before triggering
  // an overuse.
  int high_threshold_consecutive_count = 2;
  // New estimator enabled if this is set non-zero.
  int filter_time_ms = 0;  // Time constant for averaging
};
|
||||
|
||||
// Callback interface notified by OveruseFrameDetector's periodic check.
class OveruseFrameDetectorObserverInterface {
 public:
  // Called to signal that we can handle larger or more frequent frames.
  virtual void AdaptUp() = 0;
  // Called to signal that the source should reduce the resolution or framerate.
  virtual void AdaptDown() = 0;

 protected:
  // Non-virtual-public destructor: implementations are not deleted through
  // this interface.
  virtual ~OveruseFrameDetectorObserverInterface() {}
};
|
||||
|
||||
// Use to detect system overuse based on the send-side processing time of
|
||||
// incoming frames. All methods must be called on a single task queue but it can
|
||||
// be created and destroyed on an arbitrary thread.
|
||||
// OveruseFrameDetector::StartCheckForOveruse must be called to periodically
|
||||
// check for overuse.
|
||||
class OveruseFrameDetector {
 public:
  explicit OveruseFrameDetector(CpuOveruseMetricsObserver* metrics_observer);
  virtual ~OveruseFrameDetector();

  OveruseFrameDetector(const OveruseFrameDetector&) = delete;
  OveruseFrameDetector& operator=(const OveruseFrameDetector&) = delete;

  // Start to periodically check for overuse.
  void StartCheckForOveruse(
      TaskQueueBase* task_queue_base,
      const CpuOveruseOptions& options,
      OveruseFrameDetectorObserverInterface* overuse_observer);

  // StopCheckForOveruse must be called before destruction if
  // StartCheckForOveruse has been called.
  void StopCheckForOveruse();

  // Defines the current maximum framerate targeted by the capturer. This is
  // used to make sure the encode usage percent doesn't drop unduly if the
  // capturer has quiet periods (for instance caused by screen capturers with
  // variable capture rate depending on content updates), otherwise we might
  // experience adaptation toggling.
  virtual void OnTargetFramerateUpdated(int framerate_fps);

  // Called for each captured frame.
  void FrameCaptured(const VideoFrame& frame, int64_t time_when_first_seen_us);

  // Called for each sent frame.
  void FrameSent(uint32_t timestamp,
                 int64_t time_sent_in_us,
                 int64_t capture_time_us,
                 absl::optional<int> encode_duration_us);

  // Interface for cpu load estimation. Intended for internal use only.
  class ProcessingUsage {
   public:
    virtual void Reset() = 0;
    virtual void SetMaxSampleDiffMs(float diff_ms) = 0;
    virtual void FrameCaptured(const VideoFrame& frame,
                               int64_t time_when_first_seen_us,
                               int64_t last_capture_time_us) = 0;
    // Returns encode_time in us, if there's a new measurement.
    virtual absl::optional<int> FrameSent(
        // These two argument used by old estimator.
        uint32_t timestamp,
        int64_t time_sent_in_us,
        // And these two by the new estimator.
        int64_t capture_time_us,
        absl::optional<int> encode_duration_us) = 0;

    // Current usage estimate, in percent.
    virtual int Value() = 0;
    virtual ~ProcessingUsage() = default;
  };

 protected:
  // Protected for test purposes.
  void CheckForOveruse(OveruseFrameDetectorObserverInterface* overuse_observer);
  void SetOptions(const CpuOveruseOptions& options);

  CpuOveruseOptions options_;

 private:
  void EncodedFrameTimeMeasured(int encode_duration_ms);
  bool IsOverusing(int encode_usage_percent);
  bool IsUnderusing(int encode_usage_percent, int64_t time_now);

  bool FrameTimeoutDetected(int64_t now) const;
  bool FrameSizeChanged(int num_pixels) const;

  void ResetAll(int num_pixels);

  static std::unique_ptr<ProcessingUsage> CreateProcessingUsage(
      const CpuOveruseOptions& options);

  RTC_NO_UNIQUE_ADDRESS SequenceChecker task_checker_;
  // Owned by the task queue from where StartCheckForOveruse is called.
  RepeatingTaskHandle check_overuse_task_ RTC_GUARDED_BY(task_checker_);

  // Stats metrics.
  CpuOveruseMetricsObserver* const metrics_observer_;
  absl::optional<int> encode_usage_percent_ RTC_GUARDED_BY(task_checker_);

  int64_t num_process_times_ RTC_GUARDED_BY(task_checker_);

  // -1 until the first frame is captured.
  int64_t last_capture_time_us_ RTC_GUARDED_BY(task_checker_);

  // Number of pixels of last captured frame.
  int num_pixels_ RTC_GUARDED_BY(task_checker_);
  int max_framerate_ RTC_GUARDED_BY(task_checker_);
  int64_t last_overuse_time_ms_ RTC_GUARDED_BY(task_checker_);
  int checks_above_threshold_ RTC_GUARDED_BY(task_checker_);
  int num_overuse_detections_ RTC_GUARDED_BY(task_checker_);
  int64_t last_rampup_time_ms_ RTC_GUARDED_BY(task_checker_);
  bool in_quick_rampup_ RTC_GUARDED_BY(task_checker_);
  int current_rampup_delay_ms_ RTC_GUARDED_BY(task_checker_);

  std::unique_ptr<ProcessingUsage> usage_ RTC_PT_GUARDED_BY(task_checker_);

  // If set by field trial, overrides CpuOveruseOptions::filter_time_ms.
  FieldTrialOptional<TimeDelta> filter_time_constant_{"tau"};
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_OVERUSE_FRAME_DETECTOR_H_
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/pixel_limit_resource.h"
|
||||
|
||||
#include "api/sequence_checker.h"
|
||||
#include "api/units/time_delta.h"
|
||||
#include "call/adaptation/video_stream_adapter.h"
|
||||
#include "rtc_base/checks.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {

// Interval between periodic pixel-limit checks. NOTE(review): despite the
// "Ms" suffix this is a TimeDelta of five seconds, not a millisecond count.
constexpr TimeDelta kResourceUsageCheckIntervalMs = TimeDelta::Seconds(5);

}  // namespace
|
||||
|
||||
// static
// Thin factory: delegates construction to the ref-counting helper.
rtc::scoped_refptr<PixelLimitResource> PixelLimitResource::Create(
    TaskQueueBase* task_queue,
    VideoStreamInputStateProvider* input_state_provider) {
  return rtc::make_ref_counted<PixelLimitResource>(task_queue,
                                                   input_state_provider);
}
|
||||
|
||||
PixelLimitResource::PixelLimitResource(
    TaskQueueBase* task_queue,
    VideoStreamInputStateProvider* input_state_provider)
    : task_queue_(task_queue),
      input_state_provider_(input_state_provider),
      max_pixels_(absl::nullopt) {
  // Both dependencies are required; they are used without null checks
  // elsewhere in this class.
  RTC_DCHECK(task_queue_);
  RTC_DCHECK(input_state_provider_);
}
|
||||
|
||||
PixelLimitResource::~PixelLimitResource() {
  // SetResourceListener(nullptr) must have been called before destruction:
  // it clears the listener and stops the repeating task checked here.
  RTC_DCHECK(!listener_);
  RTC_DCHECK(!repeating_task_.Running());
}
|
||||
|
||||
// Sets the pixel limit enforced by the periodic check started in
// SetResourceListener(). Must be called on `task_queue_`.
void PixelLimitResource::SetMaxPixels(int max_pixels) {
  RTC_DCHECK_RUN_ON(task_queue_);
  max_pixels_ = max_pixels;
}
|
||||
|
||||
// Registers (or, when `listener` is null, clears) the adaptation listener
// and starts (or stops) the periodic pixel-limit check.
void PixelLimitResource::SetResourceListener(ResourceListener* listener) {
  RTC_DCHECK_RUN_ON(task_queue_);
  listener_ = listener;
  if (listener_) {
    repeating_task_.Stop();
    // NOTE(review): the lambda captures by reference and outlives this call;
    // safety relies on the task being stopped before `this` is destroyed
    // (enforced by the DCHECKs in the destructor).
    repeating_task_ = RepeatingTaskHandle::Start(task_queue_, [&] {
      RTC_DCHECK_RUN_ON(task_queue_);
      if (!listener_) {
        // We don't have a listener so resource adaptation must not be running,
        // try again later.
        return kResourceUsageCheckIntervalMs;
      }
      if (!max_pixels_.has_value()) {
        // No pixel limit configured yet, try again later.
        return kResourceUsageCheckIntervalMs;
      }
      absl::optional<int> frame_size_pixels =
          input_state_provider_->InputState().frame_size_pixels();
      if (!frame_size_pixels.has_value()) {
        // We haven't observed a frame yet so we don't know if it's going to be
        // too big or too small, try again later.
        return kResourceUsageCheckIntervalMs;
      }
      int current_pixels = frame_size_pixels.value();
      int target_pixel_upper_bounds = max_pixels_.value();
      // To avoid toggling, we allow any resolutions between
      // `target_pixel_upper_bounds` and video_stream_adapter.h's
      // GetLowerResolutionThan(). This is the pixels we end up if we adapt down
      // from `target_pixel_upper_bounds`.
      int target_pixels_lower_bounds =
          GetLowerResolutionThan(target_pixel_upper_bounds);
      if (current_pixels > target_pixel_upper_bounds) {
        listener_->OnResourceUsageStateMeasured(
            rtc::scoped_refptr<Resource>(this), ResourceUsageState::kOveruse);
      } else if (current_pixels < target_pixels_lower_bounds) {
        listener_->OnResourceUsageStateMeasured(
            rtc::scoped_refptr<Resource>(this), ResourceUsageState::kUnderuse);
      }
      return kResourceUsageCheckIntervalMs;
    });
  } else {
    repeating_task_.Stop();
  }
  // The task must be running if we have a listener.
  RTC_DCHECK(repeating_task_.Running() || !listener_);
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_
|
||||
#define VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/adaptation/resource.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "call/adaptation/video_stream_input_state_provider.h"
|
||||
#include "rtc_base/task_utils/repeating_task.h"
|
||||
#include "rtc_base/thread_annotations.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// An adaptation resource designed to be used in the TestBed. Used to simulate
|
||||
// being CPU limited.
|
||||
//
|
||||
// Periodically reports "overuse" or "underuse" (every 5 seconds) until the
|
||||
// stream is within the bounds specified in terms of a maximum resolution and
|
||||
// one resolution step lower than that (this avoids toggling when this is the
|
||||
// only resource in play). When multiple resources come in to play some amount
|
||||
// of toggling is still possible in edge cases but that is OK for testing
|
||||
// purposes.
|
||||
class PixelLimitResource : public Resource {
 public:
  static rtc::scoped_refptr<PixelLimitResource> Create(
      TaskQueueBase* task_queue,
      VideoStreamInputStateProvider* input_state_provider);

  PixelLimitResource(TaskQueueBase* task_queue,
                     VideoStreamInputStateProvider* input_state_provider);
  ~PixelLimitResource() override;

  // Sets the upper pixel bound enforced by the periodic check.
  void SetMaxPixels(int max_pixels);

  // Resource implementation.
  std::string Name() const override { return "PixelLimitResource"; }
  void SetResourceListener(ResourceListener* listener) override;

 private:
  TaskQueueBase* const task_queue_;
  VideoStreamInputStateProvider* const input_state_provider_;
  // Unset until SetMaxPixels() is called; the periodic check is a no-op
  // while unset.
  absl::optional<int> max_pixels_ RTC_GUARDED_BY(task_queue_);
  webrtc::ResourceListener* listener_ RTC_GUARDED_BY(task_queue_);
  RepeatingTaskHandle repeating_task_ RTC_GUARDED_BY(task_queue_);
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_PIXEL_LIMIT_RESOURCE_H_
|
||||
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/quality_rampup_experiment_helper.h"
|
||||
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include "rtc_base/logging.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Private; instances are created via CreateIfEnabled(). Both pointer
// arguments must be non-null and are borrowed, not owned.
QualityRampUpExperimentHelper::QualityRampUpExperimentHelper(
    QualityRampUpExperimentListener* experiment_listener,
    Clock* clock,
    QualityRampupExperiment experiment)
    : experiment_listener_(experiment_listener),
      clock_(clock),
      quality_rampup_experiment_(std::move(experiment)),
      cpu_adapted_(false),
      qp_resolution_adaptations_(0) {
  RTC_DCHECK(experiment_listener_);
  RTC_DCHECK(clock_);
}
|
||||
|
||||
std::unique_ptr<QualityRampUpExperimentHelper>
|
||||
QualityRampUpExperimentHelper::CreateIfEnabled(
|
||||
QualityRampUpExperimentListener* experiment_listener,
|
||||
Clock* clock) {
|
||||
QualityRampupExperiment experiment = QualityRampupExperiment::ParseSettings();
|
||||
if (experiment.Enabled()) {
|
||||
return std::unique_ptr<QualityRampUpExperimentHelper>(
|
||||
new QualityRampUpExperimentHelper(experiment_listener, clock,
|
||||
experiment));
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
void QualityRampUpExperimentHelper::ConfigureQualityRampupExperiment(
|
||||
bool reset,
|
||||
absl::optional<uint32_t> pixels,
|
||||
absl::optional<DataRate> max_bitrate) {
|
||||
if (reset)
|
||||
quality_rampup_experiment_.Reset();
|
||||
if (pixels && max_bitrate)
|
||||
quality_rampup_experiment_.SetMaxBitrate(*pixels, max_bitrate->kbps());
|
||||
}
|
||||
|
||||
// Fires QualityRampUpExperimentListener::OnQualityRampUp() when bandwidth has
// stayed high, the encoder is already at its max bitrate with low QP, at
// least one quality-driven resolution adaptation is in effect, and no CPU
// adaptation is active.
void QualityRampUpExperimentHelper::PerformQualityRampupExperiment(
    rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource,
    DataRate bandwidth,
    DataRate encoder_target_bitrate,
    absl::optional<DataRate> max_bitrate) {
  if (!quality_scaler_resource->is_started() || !max_bitrate)
    return;

  const int64_t now_ms = clock_->TimeInMilliseconds();

  // BwHigh() is evaluated unconditionally so the experiment keeps tracking
  // bandwidth samples; QpFastFilterLow() is only consulted when bandwidth is
  // high and the encoder is at its max bitrate (same short-circuit order as
  // the original nested-if structure).
  const bool bandwidth_high =
      quality_rampup_experiment_.BwHigh(now_ms, bandwidth.kbps());
  const bool try_quality_rampup =
      bandwidth_high && encoder_target_bitrate == *max_bitrate &&
      quality_scaler_resource->QpFastFilterLow();

  if (try_quality_rampup && qp_resolution_adaptations_ > 0 && !cpu_adapted_) {
    experiment_listener_->OnQualityRampUp();
  }
}
|
||||
|
||||
// Input signal: records whether a CPU-driven adaptation is currently active.
// A ramp-up is suppressed while this is true.
void QualityRampUpExperimentHelper::cpu_adapted(bool cpu_adapted) {
  cpu_adapted_ = cpu_adapted;
}
|
||||
|
||||
// Input signal: records the number of quality(QP)-driven resolution
// adaptations in effect; a ramp-up requires this to be > 0.
void QualityRampUpExperimentHelper::qp_resolution_adaptations(
    int qp_resolution_adaptations) {
  qp_resolution_adaptations_ = qp_resolution_adaptations;
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,71 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
|
||||
#define VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/units/data_rate.h"
|
||||
#include "rtc_base/experiments/quality_rampup_experiment.h"
|
||||
#include "system_wrappers/include/clock.h"
|
||||
#include "video/adaptation/quality_scaler_resource.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Callback interface notified by QualityRampUpExperimentHelper when all
// conditions for a quality ramp-up are met.
class QualityRampUpExperimentListener {
 public:
  virtual ~QualityRampUpExperimentListener() = default;
  virtual void OnQualityRampUp() = 0;
};
|
||||
|
||||
// Helper class for orchestrating the WebRTC-Video-QualityRampupSettings
// experiment.
class QualityRampUpExperimentHelper {
 public:
  // Returns a QualityRampUpExperimentHelper if the experiment is enabled,
  // a nullptr otherwise.
  static std::unique_ptr<QualityRampUpExperimentHelper> CreateIfEnabled(
      QualityRampUpExperimentListener* experiment_listener,
      Clock* clock);

  QualityRampUpExperimentHelper(const QualityRampUpExperimentHelper&) = delete;
  QualityRampUpExperimentHelper& operator=(
      const QualityRampUpExperimentHelper&) = delete;

  // Input signals gating the ramp-up decision.
  void cpu_adapted(bool cpu_adapted);
  void qp_resolution_adaptations(int qp_adaptations);

  // Re-arms the experiment state (reset window, record bitrate cap).
  void ConfigureQualityRampupExperiment(bool reset,
                                        absl::optional<uint32_t> pixels,
                                        absl::optional<DataRate> max_bitrate);

  // Evaluates the ramp-up conditions and notifies the listener when they are
  // all met.
  void PerformQualityRampupExperiment(
      rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource,
      DataRate bandwidth,
      DataRate encoder_target_bitrate,
      absl::optional<DataRate> max_bitrate);

 private:
  QualityRampUpExperimentHelper(
      QualityRampUpExperimentListener* experiment_listener,
      Clock* clock,
      QualityRampupExperiment experiment);
  QualityRampUpExperimentListener* const experiment_listener_;
  Clock* clock_;
  QualityRampupExperiment quality_rampup_experiment_;
  bool cpu_adapted_;
  int qp_resolution_adaptations_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_QUALITY_RAMPUP_EXPERIMENT_HELPER_H_
|
||||
|
|
@ -0,0 +1,101 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/quality_scaler_resource.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "api/field_trials_view.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/experiments/balanced_degradation_settings.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// static
// Factory for a ref-counted QualityScalerResource. The QualityScaler itself
// is not created until StartCheckForOveruse() is called.
rtc::scoped_refptr<QualityScalerResource> QualityScalerResource::Create() {
  return rtc::make_ref_counted<QualityScalerResource>();
}
|
||||
|
||||
// Starts in the "not started" state: quality_scaler_ is null until
// StartCheckForOveruse().
QualityScalerResource::QualityScalerResource()
    : VideoStreamEncoderResource("QualityScalerResource"),
      quality_scaler_(nullptr) {}
|
||||
|
||||
// StopCheckForOveruse() must have been called before destruction.
QualityScalerResource::~QualityScalerResource() {
  RTC_DCHECK(!quality_scaler_);
}
|
||||
|
||||
bool QualityScalerResource::is_started() const {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue());
|
||||
return quality_scaler_.get();
|
||||
}
|
||||
|
||||
// Creates the QualityScaler with the given QP thresholds, with `this` as the
// QP-usage handler. Must not already be started.
void QualityScalerResource::StartCheckForOveruse(
    VideoEncoder::QpThresholds qp_thresholds,
    const FieldTrialsView& field_trials) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  RTC_DCHECK(!is_started());
  quality_scaler_ = std::make_unique<QualityScaler>(
      this, std::move(qp_thresholds), field_trials);
}
|
||||
|
||||
// Destroys the QualityScaler. Must be started.
void QualityScalerResource::StopCheckForOveruse() {
  RTC_DCHECK_RUN_ON(encoder_queue());
  RTC_DCHECK(is_started());
  // Ensure we have no pending callbacks. This makes it safe to destroy the
  // QualityScaler and even task queues with tasks in-flight.
  quality_scaler_.reset();
}
|
||||
|
||||
// Updates the running QualityScaler's QP thresholds. Must be started.
void QualityScalerResource::SetQpThresholds(
    VideoEncoder::QpThresholds qp_thresholds) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  RTC_DCHECK(is_started());
  quality_scaler_->SetQpThresholds(std::move(qp_thresholds));
}
|
||||
|
||||
// Forwards to QualityScaler::QpFastFilterLow(); used by the quality ramp-up
// experiment to check that recent QP values are low. Must be started.
bool QualityScalerResource::QpFastFilterLow() {
  RTC_DCHECK_RUN_ON(encoder_queue());
  RTC_DCHECK(is_started());
  return quality_scaler_->QpFastFilterLow();
}
|
||||
|
||||
void QualityScalerResource::OnEncodeCompleted(const EncodedImage& encoded_image,
|
||||
int64_t time_sent_in_us) {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue());
|
||||
if (quality_scaler_ && encoded_image.qp_ >= 0) {
|
||||
quality_scaler_->ReportQp(encoded_image.qp_, time_sent_in_us);
|
||||
}
|
||||
}
|
||||
|
||||
// Forwards frame-drop notifications to the QualityScaler, attributing the
// drop to either media optimizations or the encoder. No-op while not started.
void QualityScalerResource::OnFrameDropped(
    EncodedImageCallback::DropReason reason) {
  RTC_DCHECK_RUN_ON(encoder_queue());
  if (!quality_scaler_)
    return;
  switch (reason) {
    case EncodedImageCallback::DropReason::kDroppedByMediaOptimizations:
      quality_scaler_->ReportDroppedFrameByMediaOpt();
      break;
    case EncodedImageCallback::DropReason::kDroppedByEncoder:
      quality_scaler_->ReportDroppedFrameByEncoder();
      break;
  }
}
|
||||
|
||||
// QualityScaler callback: high QP usage means the encoder is struggling, so
// signal overuse (adapt down).
void QualityScalerResource::OnReportQpUsageHigh() {
  OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
}
|
||||
|
||||
// QualityScaler callback: low QP usage means there is headroom, so signal
// underuse (adapt up).
void QualityScalerResource::OnReportQpUsageLow() {
  OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_
|
||||
#define VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_
|
||||
|
||||
#include <memory>
|
||||
#include <queue>
|
||||
#include <string>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/field_trials_view.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_adaptation_reason.h"
|
||||
#include "api/video_codecs/video_encoder.h"
|
||||
#include "call/adaptation/degradation_preference_provider.h"
|
||||
#include "call/adaptation/resource_adaptation_processor_interface.h"
|
||||
#include "modules/video_coding/utility/quality_scaler.h"
|
||||
#include "video/adaptation/video_stream_encoder_resource.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Handles interaction with the QualityScaler.
// Owns a QualityScaler between StartCheckForOveruse() and
// StopCheckForOveruse(), and translates its QP-usage callbacks into
// resource overuse/underuse signals. All methods run on the encoder queue.
class QualityScalerResource : public VideoStreamEncoderResource,
                              public QualityScalerQpUsageHandlerInterface {
 public:
  static rtc::scoped_refptr<QualityScalerResource> Create();

  QualityScalerResource();
  ~QualityScalerResource() override;

  // True while a QualityScaler instance exists.
  bool is_started() const;

  void StartCheckForOveruse(VideoEncoder::QpThresholds qp_thresholds,
                            const FieldTrialsView& field_trials);
  void StopCheckForOveruse();
  void SetQpThresholds(VideoEncoder::QpThresholds qp_thresholds);
  bool QpFastFilterLow();
  // Feeds encode results / frame drops into the QualityScaler.
  void OnEncodeCompleted(const EncodedImage& encoded_image,
                         int64_t time_sent_in_us);
  void OnFrameDropped(EncodedImageCallback::DropReason reason);

  // QualityScalerQpUsageHandlerInterface implementation.
  void OnReportQpUsageHigh() override;
  void OnReportQpUsageLow() override;

 private:
  std::unique_ptr<QualityScaler> quality_scaler_
      RTC_GUARDED_BY(encoder_queue());
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_QUALITY_SCALER_RESOURCE_H_
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/video_stream_encoder_resource.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <utility>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// The encoder queue and the listener are registered later; both start null.
VideoStreamEncoderResource::VideoStreamEncoderResource(std::string name)
    : name_(std::move(name)), encoder_queue_(nullptr), listener_(nullptr) {}
|
||||
|
||||
// The listener must have been unregistered (SetResourceListener(nullptr))
// before destruction.
VideoStreamEncoderResource::~VideoStreamEncoderResource() {
  RTC_DCHECK(!listener_)
      << "There is a listener depending on a VideoStreamEncoderResource being "
      << "destroyed.";
}
|
||||
|
||||
// One-time initialization: records the encoder task queue. May only be
// called once, with a non-null queue.
void VideoStreamEncoderResource::RegisterEncoderTaskQueue(
    TaskQueueBase* encoder_queue) {
  RTC_DCHECK(!encoder_queue_);
  RTC_DCHECK(encoder_queue);
  encoder_queue_ = encoder_queue;
}
|
||||
|
||||
// Registers (non-null) or unregisters (null) the listener that receives
// usage-state measurements. Guarded by lock_ because measurements may be
// reported concurrently with (un)registration.
void VideoStreamEncoderResource::SetResourceListener(
    ResourceListener* listener) {
  // If you want to change listener you need to unregister the old listener by
  // setting it to null first.
  MutexLock crit(&lock_);
  RTC_DCHECK(!listener_ || !listener) << "A listener is already set";
  listener_ = listener;
}
|
||||
|
||||
// Resource implementation: returns the name given at construction.
std::string VideoStreamEncoderResource::Name() const {
  return name_;
}
|
||||
|
||||
// Forwards a usage-state measurement to the registered listener, if any.
// Takes lock_ so the listener cannot be unregistered mid-call.
void VideoStreamEncoderResource::OnResourceUsageStateMeasured(
    ResourceUsageState usage_state) {
  MutexLock crit(&lock_);
  if (listener_) {
    listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource>(this),
                                            usage_state);
  }
}
|
||||
|
||||
// Returns the queue registered via RegisterEncoderTaskQueue(); null until
// then. The caller is responsible for the queue's lifetime.
TaskQueueBase* VideoStreamEncoderResource::encoder_queue() const {
  return encoder_queue_;
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
|
||||
#define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/adaptation/resource.h"
|
||||
#include "api/sequence_checker.h"
|
||||
#include "api/task_queue/task_queue_base.h"
|
||||
#include "call/adaptation/adaptation_constraint.h"
|
||||
#include "rtc_base/synchronization/mutex.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Base class for resources owned by the VideoStreamEncoder. Provides the
// common name/listener plumbing and thread-safe forwarding of usage-state
// measurements; subclasses call OnResourceUsageStateMeasured().
class VideoStreamEncoderResource : public Resource {
 public:
  ~VideoStreamEncoderResource() override;

  // Registering task queues must be performed as part of initialization.
  void RegisterEncoderTaskQueue(TaskQueueBase* encoder_queue);

  // Resource implementation.
  std::string Name() const override;
  void SetResourceListener(ResourceListener* listener) override;

 protected:
  explicit VideoStreamEncoderResource(std::string name);

  // Forwards the measurement to the registered listener under lock_.
  void OnResourceUsageStateMeasured(ResourceUsageState usage_state);

  // The caller is responsible for ensuring the task queue is still valid.
  TaskQueueBase* encoder_queue() const;

 private:
  mutable Mutex lock_;
  const std::string name_;
  // Treated as const after initialization.
  TaskQueueBase* encoder_queue_;
  ResourceListener* listener_ RTC_GUARDED_BY(lock_);
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_H_
|
||||
|
|
@ -0,0 +1,857 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "video/adaptation/video_stream_encoder_resource_manager.h"
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <cmath>
|
||||
#include <limits>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/algorithm/container.h"
|
||||
#include "absl/base/macros.h"
|
||||
#include "api/adaptation/resource.h"
|
||||
#include "api/field_trials_view.h"
|
||||
#include "api/sequence_checker.h"
|
||||
#include "api/task_queue/task_queue_base.h"
|
||||
#include "api/video/video_adaptation_reason.h"
|
||||
#include "api/video/video_source_interface.h"
|
||||
#include "call/adaptation/video_source_restrictions.h"
|
||||
#include "modules/video_coding/svc/scalability_mode_util.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/numerics/safe_conversions.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
#include "rtc_base/trace_event.h"
|
||||
#include "video/adaptation/quality_scaler_resource.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Fallback input resolution (176x144, i.e. QCIF) used when the real input
// size is not yet known. NOTE(review): presumably consumed by callers outside
// this chunk - confirm against usage sites.
const int kDefaultInputPixelsWidth = 176;
const int kDefaultInputPixelsHeight = 144;
|
||||
|
||||
namespace {
|
||||
|
||||
// Field trial that, when enabled, installs a PixelLimitResource capping the
// input resolution (see MaybeInitializePixelLimitResource()).
constexpr const char* kPixelLimitResourceFieldTrialName =
    "WebRTC-PixelLimitResource";
|
||||
|
||||
// Resolution may be adapted when the preference allows trading resolution:
// MAINTAIN_FRAMERATE or BALANCED.
bool IsResolutionScalingEnabled(DegradationPreference degradation_preference) {
  switch (degradation_preference) {
    case DegradationPreference::MAINTAIN_FRAMERATE:
    case DegradationPreference::BALANCED:
      return true;
    default:
      return false;
  }
}
|
||||
|
||||
// Framerate may be adapted when the preference allows trading framerate:
// MAINTAIN_RESOLUTION or BALANCED.
bool IsFramerateScalingEnabled(DegradationPreference degradation_preference) {
  switch (degradation_preference) {
    case DegradationPreference::MAINTAIN_RESOLUTION:
    case DegradationPreference::BALANCED:
      return true;
    default:
      return false;
  }
}
|
||||
|
||||
// Maps a VideoAdaptationReason to the lowercase string used for stats and
// logging. Crashes on an out-of-range enum value.
std::string ToString(VideoAdaptationReason reason) {
  switch (reason) {
    case VideoAdaptationReason::kQuality:
      return "quality";
    case VideoAdaptationReason::kCpu:
      return "cpu";
  }
  RTC_CHECK_NOTREACHED();
}
|
||||
|
||||
std::vector<bool> GetActiveLayersFlags(const VideoCodec& codec) {
|
||||
std::vector<bool> flags;
|
||||
if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
|
||||
flags.resize(codec.VP9().numberOfSpatialLayers);
|
||||
for (size_t i = 0; i < flags.size(); ++i) {
|
||||
flags[i] = codec.spatialLayers[i].active;
|
||||
}
|
||||
} else {
|
||||
flags.resize(codec.numberOfSimulcastStreams);
|
||||
for (size_t i = 0; i < flags.size(); ++i) {
|
||||
flags[i] = codec.simulcastStream[i].active;
|
||||
}
|
||||
}
|
||||
return flags;
|
||||
}
|
||||
|
||||
// Returns true iff `a` and `b` have the same length and identical elements.
bool EqualFlags(const std::vector<bool>& a, const std::vector<bool>& b) {
  // std::vector::operator== already performs exactly the hand-rolled
  // size-then-elementwise comparison this function previously implemented.
  return a == b;
}
|
||||
|
||||
// Returns the max bitrate of the single active layer (VP9 spatial layer or
// simulcast stream), or nullopt when more than one layer is active. With
// zero active layers this also returns nullopt (max_bitrate is never set).
absl::optional<DataRate> GetSingleActiveLayerMaxBitrate(
    const VideoCodec& codec) {
  int num_active = 0;
  absl::optional<DataRate> max_bitrate;
  if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
    for (int i = 0; i < codec.VP9().numberOfSpatialLayers; ++i) {
      if (codec.spatialLayers[i].active) {
        ++num_active;
        // Keeps overwriting; the value only matters when num_active == 1.
        max_bitrate =
            DataRate::KilobitsPerSec(codec.spatialLayers[i].maxBitrate);
      }
    }
  } else {
    for (int i = 0; i < codec.numberOfSimulcastStreams; ++i) {
      if (codec.simulcastStream[i].active) {
        ++num_active;
        max_bitrate =
            DataRate::KilobitsPerSec(codec.simulcastStream[i].maxBitrate);
      }
    }
  }
  return (num_active > 1) ? absl::nullopt : max_bitrate;
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// Tracks whether frames should be dropped at the start of a stream (or after
// a stream reconfiguration) because the available bitrate may be too low for
// the configured resolution. State machine driven by bitrate updates and
// encoder-settings updates; not thread-aware on its own.
class VideoStreamEncoderResourceManager::InitialFrameDropper {
 public:
  explicit InitialFrameDropper(
      rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource,
      const FieldTrialsView& field_trials)
      : quality_scaler_resource_(quality_scaler_resource),
        quality_scaler_settings_(field_trials),
        has_seen_first_bwe_drop_(false),
        set_start_bitrate_(DataRate::Zero()),
        set_start_bitrate_time_ms_(0),
        initial_framedrop_(0),
        use_bandwidth_allocation_(false),
        bandwidth_allocation_(DataRate::Zero()),
        last_input_width_(0),
        last_input_height_(0),
        last_stream_configuration_changed_(false) {
    RTC_DCHECK(quality_scaler_resource_);
  }

  // Output signal.
  // True while we are still in the initial-framedrop phase (fewer than
  // kMaxInitialFramedrop frames dropped since the last reset).
  bool DropInitialFrames() const {
    return initial_framedrop_ < kMaxInitialFramedrop;
  }

  absl::optional<uint32_t> single_active_stream_pixels() const {
    return single_active_stream_pixels_;
  }

  // Bandwidth allocation in bps, but only when it should be used as the
  // bitrate signal (after a resolution increase) and is non-zero.
  absl::optional<uint32_t> UseBandwidthAllocationBps() const {
    return (use_bandwidth_allocation_ &&
            bandwidth_allocation_ > DataRate::Zero())
               ? absl::optional<uint32_t>(bandwidth_allocation_.bps())
               : absl::nullopt;
  }

  bool last_stream_configuration_changed() const {
    return last_stream_configuration_changed_;
  }

  // Input signals.
  void SetStartBitrate(DataRate start_bitrate, int64_t now_ms) {
    set_start_bitrate_ = start_bitrate;
    set_start_bitrate_time_ms_ = now_ms;
  }

  void SetBandwidthAllocation(DataRate bandwidth_allocation) {
    bandwidth_allocation_ = bandwidth_allocation;
  }

  // Re-arms the initial framedrop (once per stream) if the target bitrate
  // drops well below the configured start bitrate shortly after start, per
  // the QualityScalerSettings field-trial parameters.
  void SetTargetBitrate(DataRate target_bitrate, int64_t now_ms) {
    if (set_start_bitrate_ > DataRate::Zero() && !has_seen_first_bwe_drop_ &&
        quality_scaler_resource_->is_started() &&
        quality_scaler_settings_.InitialBitrateIntervalMs() &&
        quality_scaler_settings_.InitialBitrateFactor()) {
      int64_t diff_ms = now_ms - set_start_bitrate_time_ms_;
      if (diff_ms <
              quality_scaler_settings_.InitialBitrateIntervalMs().value() &&
          (target_bitrate <
           (set_start_bitrate_ *
            quality_scaler_settings_.InitialBitrateFactor().value()))) {
        RTC_LOG(LS_INFO) << "Reset initial_framedrop_. Start bitrate: "
                         << set_start_bitrate_.bps()
                         << ", target bitrate: " << target_bitrate.bps();
        initial_framedrop_ = 0;
        has_seen_first_bwe_drop_ = true;
      }
    }
  }

  // Detects stream-configuration changes (active-layer flags, or an input
  // resolution change not caused by our own adaptation) and re-arms the
  // initial framedrop accordingly.
  void OnEncoderSettingsUpdated(
      const VideoCodec& codec,
      const VideoAdaptationCounters& adaptation_counters) {
    last_stream_configuration_changed_ = false;
    std::vector<bool> active_flags = GetActiveLayersFlags(codec);
    // Check if the source resolution has changed for the external reasons,
    // i.e. without any adaptation from WebRTC.
    const bool source_resolution_changed =
        (last_input_width_ != codec.width ||
         last_input_height_ != codec.height) &&
        adaptation_counters.resolution_adaptations ==
            last_adaptation_counters_.resolution_adaptations;
    if (!EqualFlags(active_flags, last_active_flags_) ||
        source_resolution_changed) {
      // Streams configuration has changed.
      last_stream_configuration_changed_ = true;
      // Initial frame drop must be enabled because BWE might be way too low
      // for the selected resolution.
      if (quality_scaler_resource_->is_started()) {
        RTC_LOG(LS_INFO) << "Resetting initial_framedrop_ due to changed "
                            "stream parameters";
        initial_framedrop_ = 0;
        if (single_active_stream_pixels_ &&
            VideoStreamAdapter::GetSingleActiveLayerPixels(codec) >
                *single_active_stream_pixels_) {
          // Resolution increased.
          use_bandwidth_allocation_ = true;
        }
      }
    }
    last_adaptation_counters_ = adaptation_counters;
    last_active_flags_ = active_flags;
    last_input_width_ = codec.width;
    last_input_height_ = codec.height;
    single_active_stream_pixels_ =
        VideoStreamAdapter::GetSingleActiveLayerPixels(codec);
  }

  void OnFrameDroppedDueToSize() { ++initial_framedrop_; }

  // Permanently exits the initial-framedrop phase until the next reset.
  void Disable() {
    initial_framedrop_ = kMaxInitialFramedrop;
    use_bandwidth_allocation_ = false;
  }

  void OnQualityScalerSettingsUpdated() {
    if (quality_scaler_resource_->is_started()) {
      // Restart frame drops due to size.
      initial_framedrop_ = 0;
    } else {
      // Quality scaling disabled so we shouldn't drop initial frames.
      Disable();
    }
  }

 private:
  // The maximum number of frames to drop at beginning of stream to try and
  // achieve desired bitrate.
  static const int kMaxInitialFramedrop = 4;

  const rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource_;
  const QualityScalerSettings quality_scaler_settings_;
  bool has_seen_first_bwe_drop_;
  DataRate set_start_bitrate_;
  int64_t set_start_bitrate_time_ms_;
  // Counts how many frames we've dropped in the initial framedrop phase.
  int initial_framedrop_;
  absl::optional<uint32_t> single_active_stream_pixels_;
  bool use_bandwidth_allocation_;
  DataRate bandwidth_allocation_;

  std::vector<bool> last_active_flags_;
  VideoAdaptationCounters last_adaptation_counters_;
  int last_input_width_;
  int last_input_height_;
  bool last_stream_configuration_changed_;
};
|
||||
|
||||
// All injected pointers are borrowed and must outlive this object;
// degradation_preference_provider and encoder_stats_observer must be
// non-null (checked below).
VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager(
    VideoStreamInputStateProvider* input_state_provider,
    VideoStreamEncoderObserver* encoder_stats_observer,
    Clock* clock,
    bool experiment_cpu_load_estimator,
    std::unique_ptr<OveruseFrameDetector> overuse_detector,
    DegradationPreferenceProvider* degradation_preference_provider,
    const FieldTrialsView& field_trials)
    : field_trials_(field_trials),
      degradation_preference_provider_(degradation_preference_provider),
      bitrate_constraint_(std::make_unique<BitrateConstraint>()),
      balanced_constraint_(
          std::make_unique<BalancedConstraint>(degradation_preference_provider_,
                                               field_trials)),
      encode_usage_resource_(
          EncodeUsageResource::Create(std::move(overuse_detector))),
      quality_scaler_resource_(QualityScalerResource::Create()),
      // Only created if the WebRTC-PixelLimitResource field trial is on.
      pixel_limit_resource_(nullptr),
      bandwidth_quality_scaler_resource_(
          BandwidthQualityScalerResource::Create()),
      encoder_queue_(nullptr),
      input_state_provider_(input_state_provider),
      adaptation_processor_(nullptr),
      encoder_stats_observer_(encoder_stats_observer),
      degradation_preference_(DegradationPreference::DISABLED),
      video_source_restrictions_(),
      balanced_settings_(field_trials),
      clock_(clock),
      experiment_cpu_load_estimator_(experiment_cpu_load_estimator),
      initial_frame_dropper_(
          std::make_unique<InitialFrameDropper>(quality_scaler_resource_,
                                                field_trials)),
      quality_scaling_experiment_enabled_(
          QualityScalingExperiment::Enabled(field_trials_)),
      pixel_limit_resource_experiment_enabled_(
          field_trials.IsEnabled(kPixelLimitResourceFieldTrialName)),
      encoder_target_bitrate_bps_(absl::nullopt),
      // Null when the quality ramp-up experiment is disabled.
      quality_rampup_experiment_(
          QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)),
      encoder_settings_(absl::nullopt) {
  TRACE_EVENT0(
      "webrtc",
      "VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager");
  RTC_CHECK(degradation_preference_provider_);
  RTC_CHECK(encoder_stats_observer_);
}
|
||||
|
||||
// Defaulted; resources are expected to have been stopped/removed via
// StopManagedResources() before destruction.
VideoStreamEncoderResourceManager::~VideoStreamEncoderResourceManager() =
    default;
|
||||
|
||||
// One-time initialization: records the encoder queue and propagates it to
// the managed resources. May only be called once, with a non-null queue.
void VideoStreamEncoderResourceManager::Initialize(
    TaskQueueBase* encoder_queue) {
  RTC_DCHECK(!encoder_queue_);
  RTC_DCHECK(encoder_queue);
  encoder_queue_ = encoder_queue;
  encode_usage_resource_->RegisterEncoderTaskQueue(encoder_queue_);
  quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_);
  bandwidth_quality_scaler_resource_->RegisterEncoderTaskQueue(encoder_queue_);
}
|
||||
|
||||
// Wires up the processor that resources are added to / removed from and the
// stream adapter. Both are borrowed pointers.
void VideoStreamEncoderResourceManager::SetAdaptationProcessor(
    ResourceAdaptationProcessorInterface* adaptation_processor,
    VideoStreamAdapter* stream_adapter) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  adaptation_processor_ = adaptation_processor;
  stream_adapter_ = stream_adapter;
}
|
||||
|
||||
// Updates the degradation preference and refreshes the stats observer's
// adaptation settings to match.
void VideoStreamEncoderResourceManager::SetDegradationPreferences(
    DegradationPreference degradation_preference) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  degradation_preference_ = degradation_preference;
  UpdateStatsAdaptationSettings();
}
|
||||
|
||||
// Returns the currently configured degradation preference
// (DISABLED until SetDegradationPreferences() is called).
DegradationPreference
VideoStreamEncoderResourceManager::degradation_preference() const {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  return degradation_preference_;
}
|
||||
|
||||
// (Re)starts CPU overuse detection with the current settings. Requires
// SetEncoderSettings() to have been called first. On first use the resource
// is also registered with the adaptation processor.
void VideoStreamEncoderResourceManager::ConfigureEncodeUsageResource() {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  RTC_DCHECK(encoder_settings_.has_value());
  if (encode_usage_resource_->is_started()) {
    encode_usage_resource_->StopCheckForOveruse();
  } else {
    // If the resource has not yet started then it needs to be added.
    AddResource(encode_usage_resource_, VideoAdaptationReason::kCpu);
  }
  encode_usage_resource_->StartCheckForOveruse(GetCpuOveruseOptions());
}
|
||||
|
||||
// Creates and registers the PixelLimitResource when the
// WebRTC-PixelLimitResource field trial is enabled and parses as
// "Enabled-<max_pixels>". No-op when the trial is off; logs and bails on a
// malformed trial string.
void VideoStreamEncoderResourceManager::MaybeInitializePixelLimitResource() {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  RTC_DCHECK(adaptation_processor_);
  RTC_DCHECK(!pixel_limit_resource_);
  if (!pixel_limit_resource_experiment_enabled_) {
    // The field trial is not running.
    return;
  }
  int max_pixels = 0;
  std::string pixel_limit_field_trial =
      field_trials_.Lookup(kPixelLimitResourceFieldTrialName);
  if (sscanf(pixel_limit_field_trial.c_str(), "Enabled-%d", &max_pixels) != 1) {
    RTC_LOG(LS_ERROR) << "Couldn't parse " << kPixelLimitResourceFieldTrialName
                      << " trial config: " << pixel_limit_field_trial;
    return;
  }
  RTC_LOG(LS_INFO) << "Running field trial "
                   << kPixelLimitResourceFieldTrialName << " configured to "
                   << max_pixels << " max pixels";
  // Configure the specified max pixels from the field trial. The pixel limit
  // resource is active for the lifetme of the stream (until
  // StopManagedResources() is called).
  pixel_limit_resource_ =
      PixelLimitResource::Create(encoder_queue_, input_state_provider_);
  pixel_limit_resource_->SetMaxPixels(max_pixels);
  AddResource(pixel_limit_resource_, VideoAdaptationReason::kCpu);
}
|
||||
|
||||
// Stops and deregisters every managed resource that is currently active.
// Safe to call when some resources were never started.
void VideoStreamEncoderResourceManager::StopManagedResources() {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  RTC_DCHECK(adaptation_processor_);
  if (encode_usage_resource_->is_started()) {
    encode_usage_resource_->StopCheckForOveruse();
    RemoveResource(encode_usage_resource_);
  }
  if (quality_scaler_resource_->is_started()) {
    quality_scaler_resource_->StopCheckForOveruse();
    RemoveResource(quality_scaler_resource_);
  }
  if (pixel_limit_resource_) {
    RemoveResource(pixel_limit_resource_);
    pixel_limit_resource_ = nullptr;
  }
  if (bandwidth_quality_scaler_resource_->is_started()) {
    bandwidth_quality_scaler_resource_->StopCheckForOveruse();
    RemoveResource(bandwidth_quality_scaler_resource_);
  }
}
|
||||
|
||||
// Registers `resource` under `reason` (used for legacy getStats() mapping)
// and hands the resource to the adaptation processor. Adding the same
// resource twice is a programming error.
void VideoStreamEncoderResourceManager::AddResource(
    rtc::scoped_refptr<Resource> resource,
    VideoAdaptationReason reason) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  RTC_DCHECK(resource);
  const bool inserted = resources_.emplace(resource, reason).second;
  RTC_DCHECK(inserted) << "Resource " << resource->Name()
                       << " already was inserted";
  adaptation_processor_->AddResource(resource);
}
|
||||
|
||||
// Deregisters `resource` from the reason map and then from the adaptation
// processor. The resource must previously have been added via AddResource().
void VideoStreamEncoderResourceManager::RemoveResource(
    rtc::scoped_refptr<Resource> resource) {
  {
    RTC_DCHECK_RUN_ON(encoder_queue_);
    RTC_DCHECK(resource);
    auto entry = resources_.find(resource);
    RTC_DCHECK(entry != resources_.end())
        << "Resource \"" << resource->Name() << "\" not found.";
    resources_.erase(entry);
  }
  // The processor is informed outside the scope above, matching the original
  // structure of this method.
  adaptation_processor_->RemoveResource(resource);
}
|
||||
|
||||
std::vector<AdaptationConstraint*>
|
||||
VideoStreamEncoderResourceManager::AdaptationConstraints() const {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
return {bitrate_constraint_.get(), balanced_constraint_.get()};
|
||||
}
|
||||
|
||||
// Stores the new encoder settings and fans them out to the subsystems that
// depend on the codec configuration: the bitrate constraint, the initial
// frame dropper, the overuse detector's target frame rate, and (when active)
// the quality ramp-up experiment. Must run on the encoder queue.
void VideoStreamEncoderResourceManager::SetEncoderSettings(
    EncoderSettings encoder_settings) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  encoder_settings_ = std::move(encoder_settings);
  bitrate_constraint_->OnEncoderSettingsUpdated(encoder_settings_);
  initial_frame_dropper_->OnEncoderSettingsUpdated(
      encoder_settings_->video_codec(), current_adaptation_counters_);
  // The codec's max framerate may have changed; recompute the target rate
  // fed to the encode usage resource.
  MaybeUpdateTargetFrameRate();
  if (quality_rampup_experiment_) {
    quality_rampup_experiment_->ConfigureQualityRampupExperiment(
        initial_frame_dropper_->last_stream_configuration_changed(),
        initial_frame_dropper_->single_active_stream_pixels(),
        GetSingleActiveLayerMaxBitrate(encoder_settings_->video_codec()));
  }
}
|
||||
|
||||
// Records the initial target bitrate and informs the bitrate-dependent
// constraints. A zero bitrate does not update the constraints but is still
// forwarded to the initial frame dropper.
void VideoStreamEncoderResourceManager::SetStartBitrate(
    DataRate start_bitrate) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  if (!start_bitrate.IsZero()) {
    encoder_target_bitrate_bps_ = start_bitrate.bps();
    bitrate_constraint_->OnEncoderTargetBitrateUpdated(
        encoder_target_bitrate_bps_);
    balanced_constraint_->OnEncoderTargetBitrateUpdated(
        encoder_target_bitrate_bps_);
  }
  // NOTE(review): this passes microseconds while SetTargetBitrate() passes
  // clock_->TimeInMilliseconds() to the same frame dropper - confirm which
  // time unit InitialFrameDropper expects.
  initial_frame_dropper_->SetStartBitrate(start_bitrate,
                                          clock_->TimeInMicroseconds());
}
|
||||
|
||||
// Records the current target bitrate and informs the bitrate-dependent
// constraints. A zero bitrate does not update the constraints but is still
// forwarded to the initial frame dropper.
void VideoStreamEncoderResourceManager::SetTargetBitrate(
    DataRate target_bitrate) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  if (!target_bitrate.IsZero()) {
    encoder_target_bitrate_bps_ = target_bitrate.bps();
    bitrate_constraint_->OnEncoderTargetBitrateUpdated(
        encoder_target_bitrate_bps_);
    balanced_constraint_->OnEncoderTargetBitrateUpdated(
        encoder_target_bitrate_bps_);
  }
  // NOTE(review): this passes milliseconds while SetStartBitrate() passes
  // clock_->TimeInMicroseconds() to the same frame dropper - confirm which
  // time unit InitialFrameDropper expects.
  initial_frame_dropper_->SetTargetBitrate(target_bitrate,
                                           clock_->TimeInMilliseconds());
}
|
||||
|
||||
void VideoStreamEncoderResourceManager::SetEncoderRates(
|
||||
const VideoEncoder::RateControlParameters& encoder_rates) {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
encoder_rates_ = encoder_rates;
|
||||
initial_frame_dropper_->SetBandwidthAllocation(
|
||||
encoder_rates.bandwidth_allocation);
|
||||
}
|
||||
|
||||
void VideoStreamEncoderResourceManager::OnFrameDroppedDueToSize() {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
initial_frame_dropper_->OnFrameDroppedDueToSize();
|
||||
Adaptation reduce_resolution = stream_adapter_->GetAdaptDownResolution();
|
||||
if (reduce_resolution.status() == Adaptation::Status::kValid) {
|
||||
stream_adapter_->ApplyAdaptation(reduce_resolution,
|
||||
quality_scaler_resource_);
|
||||
}
|
||||
}
|
||||
|
||||
// Forwards the encode-start event to the encode usage (CPU overuse) resource.
void VideoStreamEncoderResourceManager::OnEncodeStarted(
    const VideoFrame& cropped_frame,
    int64_t time_when_first_seen_us) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  encode_usage_resource_->OnEncodeStarted(cropped_frame,
                                          time_when_first_seen_us);
}
|
||||
|
||||
void VideoStreamEncoderResourceManager::OnEncodeCompleted(
|
||||
const EncodedImage& encoded_image,
|
||||
int64_t time_sent_in_us,
|
||||
absl::optional<int> encode_duration_us,
|
||||
DataSize frame_size) {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
// Inform `encode_usage_resource_` of the encode completed event.
|
||||
uint32_t timestamp = encoded_image.RtpTimestamp();
|
||||
int64_t capture_time_us =
|
||||
encoded_image.capture_time_ms_ * rtc::kNumMicrosecsPerMillisec;
|
||||
encode_usage_resource_->OnEncodeCompleted(
|
||||
timestamp, time_sent_in_us, capture_time_us, encode_duration_us);
|
||||
quality_scaler_resource_->OnEncodeCompleted(encoded_image, time_sent_in_us);
|
||||
bandwidth_quality_scaler_resource_->OnEncodeCompleted(
|
||||
encoded_image, time_sent_in_us, frame_size.bytes());
|
||||
}
|
||||
|
||||
// Forwards a frame-drop event (with its reason) to the quality scaler.
void VideoStreamEncoderResourceManager::OnFrameDropped(
    EncodedImageCallback::DropReason reason) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  quality_scaler_resource_->OnFrameDropped(reason);
}
|
||||
|
||||
// Returns true while the initial frame dropper wants frames dropped (e.g.
// before the first suitable bitrate has been reached).
bool VideoStreamEncoderResourceManager::DropInitialFrames() const {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  return initial_frame_dropper_->DropInitialFrames();
}
|
||||
|
||||
// Pixel count of the single active stream as tracked by the initial frame
// dropper, or nullopt when there is no single active stream.
absl::optional<uint32_t>
VideoStreamEncoderResourceManager::SingleActiveStreamPixels() const {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  return initial_frame_dropper_->single_active_stream_pixels();
}
|
||||
|
||||
// Bandwidth allocation (bps) the initial frame dropper says should be used,
// or nullopt when it should not be used.
absl::optional<uint32_t>
VideoStreamEncoderResourceManager::UseBandwidthAllocationBps() const {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  return initial_frame_dropper_->UseBandwidthAllocationBps();
}
|
||||
|
||||
void VideoStreamEncoderResourceManager::OnMaybeEncodeFrame() {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
initial_frame_dropper_->Disable();
|
||||
if (quality_rampup_experiment_ && quality_scaler_resource_->is_started()) {
|
||||
DataRate bandwidth = encoder_rates_.has_value()
|
||||
? encoder_rates_->bandwidth_allocation
|
||||
: DataRate::Zero();
|
||||
quality_rampup_experiment_->PerformQualityRampupExperiment(
|
||||
quality_scaler_resource_, bandwidth,
|
||||
DataRate::BitsPerSec(encoder_target_bitrate_bps_.value_or(0)),
|
||||
GetSingleActiveLayerMaxBitrate(encoder_settings_->video_codec()));
|
||||
}
|
||||
}
|
||||
|
||||
// Starts, reconfigures, or stops the QP-based quality scaler.
// - With thresholds: update them if the scaler is already running, otherwise
//   start the overuse checker and register the resource under kQuality.
// - With nullopt: stop and deregister the scaler if it is running.
// The initial frame dropper is always notified of the settings change.
void VideoStreamEncoderResourceManager::UpdateQualityScalerSettings(
    absl::optional<VideoEncoder::QpThresholds> qp_thresholds) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  if (qp_thresholds.has_value()) {
    if (quality_scaler_resource_->is_started()) {
      quality_scaler_resource_->SetQpThresholds(qp_thresholds.value());
    } else {
      // Note: here the checker is started before AddResource(); the bandwidth
      // variant (UpdateBandwidthQualityScalerSettings) requires the opposite
      // order.
      quality_scaler_resource_->StartCheckForOveruse(qp_thresholds.value(),
                                                     field_trials_);
      AddResource(quality_scaler_resource_, VideoAdaptationReason::kQuality);
    }
  } else if (quality_scaler_resource_->is_started()) {
    quality_scaler_resource_->StopCheckForOveruse();
    RemoveResource(quality_scaler_resource_);
  }
  initial_frame_dropper_->OnQualityScalerSettingsUpdated();
}
|
||||
|
||||
// Starts or stops the bandwidth quality scaler according to
// `bandwidth_quality_scaling_allowed`, configured with the encoder's
// per-resolution bitrate limits.
void VideoStreamEncoderResourceManager::UpdateBandwidthQualityScalerSettings(
    bool bandwidth_quality_scaling_allowed,
    const std::vector<VideoEncoder::ResolutionBitrateLimits>&
        resolution_bitrate_limits) {
  RTC_DCHECK_RUN_ON(encoder_queue_);

  if (!bandwidth_quality_scaling_allowed) {
    if (bandwidth_quality_scaler_resource_->is_started()) {
      bandwidth_quality_scaler_resource_->StopCheckForOveruse();
      RemoveResource(bandwidth_quality_scaler_resource_);
    }
  } else {
    if (!bandwidth_quality_scaler_resource_->is_started()) {
      // "AddResource" must run before "StartCheckForOveruse" because it makes
      // the resource's listener valid.
      AddResource(bandwidth_quality_scaler_resource_,
                  webrtc::VideoAdaptationReason::kQuality);
      bandwidth_quality_scaler_resource_->StartCheckForOveruse(
          resolution_bitrate_limits);
    }
  }
}
|
||||
|
||||
// Decides whether QP-based quality scaling should run given the current
// degradation preference and encoder capabilities, (re)configures the scaler
// accordingly, and reports the resulting adaptation settings to stats.
// Quality scaling requires: resolution scaling enabled by the degradation
// preference, either encoder-provided thresholds or an opt-in via the encoder
// config, and a trusted QP signal.
void VideoStreamEncoderResourceManager::ConfigureQualityScaler(
    const VideoEncoder::EncoderInfo& encoder_info) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  const auto scaling_settings = encoder_info.scaling_settings;
  const bool quality_scaling_allowed =
      IsResolutionScalingEnabled(degradation_preference_) &&
      (scaling_settings.thresholds.has_value() ||
       (encoder_settings_.has_value() &&
        encoder_settings_->encoder_config().is_quality_scaling_allowed)) &&
      encoder_info.is_qp_trusted.value_or(true);

  // TODO(https://crbug.com/webrtc/11222): Should this move to
  // QualityScalerResource?
  if (quality_scaling_allowed) {
    if (!quality_scaler_resource_->is_started()) {
      // Quality scaler has not already been configured.

      // Use experimental thresholds if available.
      absl::optional<VideoEncoder::QpThresholds> experimental_thresholds;
      if (quality_scaling_experiment_enabled_) {
        experimental_thresholds = QualityScalingExperiment::GetQpThresholds(
            GetVideoCodecTypeOrGeneric(encoder_settings_), field_trials_);
      }
      UpdateQualityScalerSettings(experimental_thresholds.has_value()
                                      ? experimental_thresholds
                                      : scaling_settings.thresholds);
    }
  } else {
    // Stops and deregisters the scaler if it was running.
    UpdateQualityScalerSettings(absl::nullopt);
  }

  // Set the qp-thresholds to the balanced settings if balanced mode.
  if (degradation_preference_ == DegradationPreference::BALANCED &&
      quality_scaler_resource_->is_started()) {
    absl::optional<VideoEncoder::QpThresholds> thresholds =
        balanced_settings_.GetQpThresholds(
            GetVideoCodecTypeOrGeneric(encoder_settings_),
            LastFrameSizeOrDefault());
    if (thresholds) {
      quality_scaler_resource_->SetQpThresholds(*thresholds);
    }
  }
  UpdateStatsAdaptationSettings();
}
|
||||
|
||||
void VideoStreamEncoderResourceManager::ConfigureBandwidthQualityScaler(
|
||||
const VideoEncoder::EncoderInfo& encoder_info) {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
const bool bandwidth_quality_scaling_allowed =
|
||||
IsResolutionScalingEnabled(degradation_preference_) &&
|
||||
(encoder_settings_.has_value() &&
|
||||
encoder_settings_->encoder_config().is_quality_scaling_allowed) &&
|
||||
!encoder_info.is_qp_trusted.value_or(true);
|
||||
|
||||
UpdateBandwidthQualityScalerSettings(bandwidth_quality_scaling_allowed,
|
||||
encoder_info.resolution_bitrate_limits);
|
||||
UpdateStatsAdaptationSettings();
|
||||
}
|
||||
|
||||
// Maps a registered resource back to the adaptation reason (kCpu/kQuality)
// it was registered under. The resource must be present in `resources_`.
VideoAdaptationReason VideoStreamEncoderResourceManager::GetReasonFromResource(
    rtc::scoped_refptr<Resource> resource) const {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  auto it = resources_.find(resource);
  RTC_DCHECK(it != resources_.end()) << resource->Name() << " not found.";
  return it->second;
}
|
||||
|
||||
// TODO(pbos): Lower these thresholds (to closer to 100%) when we handle
|
||||
// pipelining encoders better (multiple input frames before something comes
|
||||
// out). This should effectively turn off CPU adaptations for systems that
|
||||
// remotely cope with the load right now.
|
||||
CpuOveruseOptions VideoStreamEncoderResourceManager::GetCpuOveruseOptions()
|
||||
const {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
// This is already ensured by the only caller of this method:
|
||||
// StartResourceAdaptation().
|
||||
RTC_DCHECK(encoder_settings_.has_value());
|
||||
CpuOveruseOptions options;
|
||||
// Hardware accelerated encoders are assumed to be pipelined; give them
|
||||
// additional overuse time.
|
||||
if (encoder_settings_->encoder_info().is_hardware_accelerated) {
|
||||
options.low_encode_usage_threshold_percent = 150;
|
||||
options.high_encode_usage_threshold_percent = 200;
|
||||
}
|
||||
if (experiment_cpu_load_estimator_) {
|
||||
options.filter_time_ms = 5 * rtc::kNumMillisecsPerSec;
|
||||
}
|
||||
return options;
|
||||
}
|
||||
|
||||
int VideoStreamEncoderResourceManager::LastFrameSizeOrDefault() const {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
return input_state_provider_->InputState()
|
||||
.single_active_stream_pixels()
|
||||
.value_or(
|
||||
input_state_provider_->InputState().frame_size_pixels().value_or(
|
||||
kDefaultInputPixelsWidth * kDefaultInputPixelsHeight));
|
||||
}
|
||||
|
||||
// VideoSourceRestrictionsListener implementation. Caches the new adaptation
// counters, clears per-reason stats on a manual reset, stores the
// restrictions (filtered by the current degradation preference) and updates
// the overuse detector's target frame rate.
void VideoStreamEncoderResourceManager::OnVideoSourceRestrictionsUpdated(
    VideoSourceRestrictions restrictions,
    const VideoAdaptationCounters& adaptation_counters,
    rtc::scoped_refptr<Resource> reason,
    const VideoSourceRestrictions& unfiltered_restrictions) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  current_adaptation_counters_ = adaptation_counters;

  // TODO(bugs.webrtc.org/11553) Remove reason parameter and add reset callback.
  if (!reason && adaptation_counters.Total() == 0) {
    // Adaptation was manually reset - clear the per-reason counters too.
    encoder_stats_observer_->ClearAdaptationStats();
  }

  video_source_restrictions_ = FilterRestrictionsByDegradationPreference(
      restrictions, degradation_preference_);
  MaybeUpdateTargetFrameRate();
}
|
||||
|
||||
// ResourceLimitationsListener implementation. Collapses per-resource
// adaptation counters into per-reason (kCpu/kQuality) counters for stats
// reporting and feeds the quality ramp-up experiment. A null `resource`
// signals a reset of the adaptation stats.
void VideoStreamEncoderResourceManager::OnResourceLimitationChanged(
    rtc::scoped_refptr<Resource> resource,
    const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
        resource_limitations) {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  if (!resource) {
    encoder_stats_observer_->ClearAdaptationStats();
    return;
  }

  // When several resources map to the same reason, keep the counter with the
  // largest total for that reason.
  std::map<VideoAdaptationReason, VideoAdaptationCounters> limitations;
  for (auto& resource_counter : resource_limitations) {
    std::map<VideoAdaptationReason, VideoAdaptationCounters>::iterator it;
    bool inserted;
    std::tie(it, inserted) = limitations.emplace(
        GetReasonFromResource(resource_counter.first), resource_counter.second);
    if (!inserted && it->second.Total() < resource_counter.second.Total()) {
      it->second = resource_counter.second;
    }
  }

  // operator[] default-inserts zero counters for reasons that had no
  // limiting resource, which also guarantees the .at() calls below succeed.
  VideoAdaptationReason adaptation_reason = GetReasonFromResource(resource);
  encoder_stats_observer_->OnAdaptationChanged(
      adaptation_reason, limitations[VideoAdaptationReason::kCpu],
      limitations[VideoAdaptationReason::kQuality]);

  if (quality_rampup_experiment_) {
    bool cpu_limited = limitations.at(VideoAdaptationReason::kCpu).Total() > 0;
    auto qp_resolution_adaptations =
        limitations.at(VideoAdaptationReason::kQuality).resolution_adaptations;
    quality_rampup_experiment_->cpu_adapted(cpu_limited);
    quality_rampup_experiment_->qp_resolution_adaptations(
        qp_resolution_adaptations);
  }

  RTC_LOG(LS_INFO) << ActiveCountsToString(limitations);
}
|
||||
|
||||
void VideoStreamEncoderResourceManager::MaybeUpdateTargetFrameRate() {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
absl::optional<double> codec_max_frame_rate =
|
||||
encoder_settings_.has_value()
|
||||
? absl::optional<double>(
|
||||
encoder_settings_->video_codec().maxFramerate)
|
||||
: absl::nullopt;
|
||||
// The current target framerate is the maximum frame rate as specified by
|
||||
// the current codec configuration or any limit imposed by the adaptation
|
||||
// module. This is used to make sure overuse detection doesn't needlessly
|
||||
// trigger in low and/or variable framerate scenarios.
|
||||
absl::optional<double> target_frame_rate =
|
||||
video_source_restrictions_.max_frame_rate();
|
||||
if (!target_frame_rate.has_value() ||
|
||||
(codec_max_frame_rate.has_value() &&
|
||||
codec_max_frame_rate.value() < target_frame_rate.value())) {
|
||||
target_frame_rate = codec_max_frame_rate;
|
||||
}
|
||||
encode_usage_resource_->SetTargetFrameRate(target_frame_rate);
|
||||
}
|
||||
|
||||
void VideoStreamEncoderResourceManager::UpdateStatsAdaptationSettings() const {
|
||||
RTC_DCHECK_RUN_ON(encoder_queue_);
|
||||
VideoStreamEncoderObserver::AdaptationSettings cpu_settings(
|
||||
IsResolutionScalingEnabled(degradation_preference_),
|
||||
IsFramerateScalingEnabled(degradation_preference_));
|
||||
|
||||
VideoStreamEncoderObserver::AdaptationSettings quality_settings =
|
||||
(quality_scaler_resource_->is_started() ||
|
||||
bandwidth_quality_scaler_resource_->is_started())
|
||||
? cpu_settings
|
||||
: VideoStreamEncoderObserver::AdaptationSettings();
|
||||
encoder_stats_observer_->UpdateAdaptationSettings(cpu_settings,
|
||||
quality_settings);
|
||||
}
|
||||
|
||||
// static
|
||||
std::string VideoStreamEncoderResourceManager::ActiveCountsToString(
|
||||
const std::map<VideoAdaptationReason, VideoAdaptationCounters>&
|
||||
active_counts) {
|
||||
rtc::StringBuilder ss;
|
||||
|
||||
ss << "Downgrade counts: fps: {";
|
||||
for (auto& reason_count : active_counts) {
|
||||
ss << ToString(reason_count.first) << ":";
|
||||
ss << reason_count.second.fps_adaptations;
|
||||
}
|
||||
ss << "}, resolution {";
|
||||
for (auto& reason_count : active_counts) {
|
||||
ss << ToString(reason_count.first) << ":";
|
||||
ss << reason_count.second.resolution_adaptations;
|
||||
}
|
||||
ss << "}";
|
||||
|
||||
return ss.Release();
|
||||
}
|
||||
|
||||
// QualityRampUpExperimentListener implementation. Lifts all adaptation
// restrictions when the ramp-up experiment fires; the experiment is one-shot,
// so the helper is released afterwards.
void VideoStreamEncoderResourceManager::OnQualityRampUp() {
  RTC_DCHECK_RUN_ON(encoder_queue_);
  stream_adapter_->ClearRestrictions();
  quality_rampup_experiment_.reset();
}
|
||||
|
||||
// Returns true when the configuration describes simulcast or SVC with more
// than one spatial layer. Used to distinguish single-stream encodes from
// multi-layer ones when making adaptation decisions.
bool VideoStreamEncoderResourceManager::IsSimulcastOrMultipleSpatialLayers(
    const VideoEncoderConfig& encoder_config,
    const VideoCodec& video_codec) {
  const std::vector<VideoStream>& simulcast_layers =
      encoder_config.simulcast_layers;
  if (simulcast_layers.empty()) {
    return false;
  }

  // If the first layer carries a scalability mode and the codec is configured
  // for a single simulcast stream, derive the SVC spatial layer count from it.
  absl::optional<int> num_spatial_layers;
  if (simulcast_layers[0].scalability_mode.has_value() &&
      video_codec.numberOfSimulcastStreams == 1) {
    num_spatial_layers = ScalabilityModeToNumSpatialLayers(
        *simulcast_layers[0].scalability_mode);
  }

  if (simulcast_layers.size() == 1) {
    // Check if multiple spatial layers are used.
    return num_spatial_layers && *num_spatial_layers > 1;
  }

  bool svc_with_one_spatial_layer =
      num_spatial_layers && *num_spatial_layers == 1;
  if (simulcast_layers[0].active && !svc_with_one_spatial_layer) {
    // We can't distinguish between simulcast and singlecast when only the
    // lowest spatial layer is active. Treat this case as simulcast.
    return true;
  }

  // Otherwise it is simulcast only if more than one layer is active.
  int num_active_layers =
      std::count_if(simulcast_layers.begin(), simulcast_layers.end(),
                    [](const VideoStream& layer) { return layer.active; });
  return num_active_layers > 1;
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,239 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_
|
||||
#define VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_
|
||||
|
||||
#include <atomic>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/adaptation/resource.h"
|
||||
#include "api/field_trials_view.h"
|
||||
#include "api/rtp_parameters.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/task_queue/task_queue_base.h"
|
||||
#include "api/video/video_adaptation_counters.h"
|
||||
#include "api/video/video_adaptation_reason.h"
|
||||
#include "api/video/video_frame.h"
|
||||
#include "api/video/video_source_interface.h"
|
||||
#include "api/video_codecs/video_codec.h"
|
||||
#include "api/video_codecs/video_encoder.h"
|
||||
#include "call/adaptation/resource_adaptation_processor_interface.h"
|
||||
#include "call/adaptation/video_stream_adapter.h"
|
||||
#include "call/adaptation/video_stream_input_state_provider.h"
|
||||
#include "rtc_base/experiments/quality_scaler_settings.h"
|
||||
#include "rtc_base/ref_count.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
#include "rtc_base/synchronization/mutex.h"
|
||||
#include "rtc_base/thread_annotations.h"
|
||||
#include "system_wrappers/include/clock.h"
|
||||
#include "video/adaptation/balanced_constraint.h"
|
||||
#include "video/adaptation/bandwidth_quality_scaler_resource.h"
|
||||
#include "video/adaptation/bitrate_constraint.h"
|
||||
#include "video/adaptation/encode_usage_resource.h"
|
||||
#include "video/adaptation/overuse_frame_detector.h"
|
||||
#include "video/adaptation/pixel_limit_resource.h"
|
||||
#include "video/adaptation/quality_rampup_experiment_helper.h"
|
||||
#include "video/adaptation/quality_scaler_resource.h"
|
||||
#include "video/adaptation/video_stream_encoder_resource.h"
|
||||
#include "video/config/video_encoder_config.h"
|
||||
#include "video/video_stream_encoder_observer.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// The assumed input frame size if we have not yet received a frame.
|
||||
// TODO(hbos): This is 144p - why are we assuming super low quality? Seems like
|
||||
// a bad heuristic.
|
||||
extern const int kDefaultInputPixelsWidth;
|
||||
extern const int kDefaultInputPixelsHeight;
|
||||
|
||||
// Owns adaptation-related Resources pertaining to a single VideoStreamEncoder
|
||||
// and passes on the relevant input from the encoder to the resources. The
|
||||
// resources provide resource usage states to the ResourceAdaptationProcessor
|
||||
// which is responsible for reconfiguring streams in order not to overuse
|
||||
// resources.
|
||||
//
|
||||
// The manager is also involved with various mitigations not part of the
|
||||
// ResourceAdaptationProcessor code such as the initial frame dropping.
|
||||
class VideoStreamEncoderResourceManager
    : public VideoSourceRestrictionsListener,
      public ResourceLimitationsListener,
      public QualityRampUpExperimentListener {
 public:
  // The manager does not take ownership of the raw-pointer collaborators;
  // they must outlive it.
  VideoStreamEncoderResourceManager(
      VideoStreamInputStateProvider* input_state_provider,
      VideoStreamEncoderObserver* encoder_stats_observer,
      Clock* clock,
      bool experiment_cpu_load_estimator,
      std::unique_ptr<OveruseFrameDetector> overuse_detector,
      DegradationPreferenceProvider* degradation_preference_provider,
      const FieldTrialsView& field_trials);
  ~VideoStreamEncoderResourceManager() override;

  // Binds the manager to the encoder task queue; most methods must then be
  // called on that queue.
  void Initialize(TaskQueueBase* encoder_queue);
  void SetAdaptationProcessor(
      ResourceAdaptationProcessorInterface* adaptation_processor,
      VideoStreamAdapter* stream_adapter);

  // TODO(https://crbug.com/webrtc/11563): The degradation preference is a
  // setting of the Processor, it does not belong to the Manager - can we get
  // rid of this?
  void SetDegradationPreferences(DegradationPreference degradation_preference);
  DegradationPreference degradation_preference() const;

  void ConfigureEncodeUsageResource();
  // Initializes the pixel limit resource if the "WebRTC-PixelLimitResource"
  // field trial is enabled. This can be used for testing.
  void MaybeInitializePixelLimitResource();
  // Stops the encode usage and quality scaler resources if not already stopped.
  // If the pixel limit resource was created it is also stopped and nulled.
  void StopManagedResources();

  // Settings that affect the VideoStreamEncoder-specific resources.
  void SetEncoderSettings(EncoderSettings encoder_settings);
  void SetStartBitrate(DataRate start_bitrate);
  void SetTargetBitrate(DataRate target_bitrate);
  void SetEncoderRates(
      const VideoEncoder::RateControlParameters& encoder_rates);
  // TODO(https://crbug.com/webrtc/11338): This can be made private if we
  // configure on SetDegredationPreference and SetEncoderSettings.
  void ConfigureQualityScaler(const VideoEncoder::EncoderInfo& encoder_info);
  void ConfigureBandwidthQualityScaler(
      const VideoEncoder::EncoderInfo& encoder_info);

  // Methods corresponding to different points in the encoding pipeline.
  void OnFrameDroppedDueToSize();
  void OnMaybeEncodeFrame();
  void OnEncodeStarted(const VideoFrame& cropped_frame,
                       int64_t time_when_first_seen_us);
  void OnEncodeCompleted(const EncodedImage& encoded_image,
                         int64_t time_sent_in_us,
                         absl::optional<int> encode_duration_us,
                         DataSize frame_size);
  void OnFrameDropped(EncodedImageCallback::DropReason reason);

  // Resources need to be mapped to an AdaptReason (kCpu or kQuality) in order
  // to update legacy getStats().
  void AddResource(rtc::scoped_refptr<Resource> resource,
                   VideoAdaptationReason reason);
  void RemoveResource(rtc::scoped_refptr<Resource> resource);
  std::vector<AdaptationConstraint*> AdaptationConstraints() const;
  // If true, the VideoStreamEncoder should execute its logic to maybe drop
  // frames based on size and bitrate.
  bool DropInitialFrames() const;
  absl::optional<uint32_t> SingleActiveStreamPixels() const;
  absl::optional<uint32_t> UseBandwidthAllocationBps() const;

  // VideoSourceRestrictionsListener implementation.
  // Updates `video_source_restrictions_`.
  void OnVideoSourceRestrictionsUpdated(
      VideoSourceRestrictions restrictions,
      const VideoAdaptationCounters& adaptation_counters,
      rtc::scoped_refptr<Resource> reason,
      const VideoSourceRestrictions& unfiltered_restrictions) override;
  void OnResourceLimitationChanged(
      rtc::scoped_refptr<Resource> resource,
      const std::map<rtc::scoped_refptr<Resource>, VideoAdaptationCounters>&
          resource_limitations) override;

  // QualityRampUpExperimentListener implementation.
  void OnQualityRampUp() override;

  static bool IsSimulcastOrMultipleSpatialLayers(
      const VideoEncoderConfig& encoder_config,
      const VideoCodec& video_codec);

 private:
  class InitialFrameDropper;

  VideoAdaptationReason GetReasonFromResource(
      rtc::scoped_refptr<Resource> resource) const;

  CpuOveruseOptions GetCpuOveruseOptions() const;
  int LastFrameSizeOrDefault() const;

  // Calculates an up-to-date value of the target frame rate and informs the
  // `encode_usage_resource_` of the new value.
  void MaybeUpdateTargetFrameRate();

  // Use nullopt to disable quality scaling.
  void UpdateQualityScalerSettings(
      absl::optional<VideoEncoder::QpThresholds> qp_thresholds);

  void UpdateBandwidthQualityScalerSettings(
      bool bandwidth_quality_scaling_allowed,
      const std::vector<VideoEncoder::ResolutionBitrateLimits>&
          resolution_bitrate_limits);

  void UpdateStatsAdaptationSettings() const;

  static std::string ActiveCountsToString(
      const std::map<VideoAdaptationReason, VideoAdaptationCounters>&
          active_counts);

  const FieldTrialsView& field_trials_;
  DegradationPreferenceProvider* const degradation_preference_provider_;
  std::unique_ptr<BitrateConstraint> bitrate_constraint_
      RTC_GUARDED_BY(encoder_queue_);
  const std::unique_ptr<BalancedConstraint> balanced_constraint_
      RTC_GUARDED_BY(encoder_queue_);
  const rtc::scoped_refptr<EncodeUsageResource> encode_usage_resource_;
  const rtc::scoped_refptr<QualityScalerResource> quality_scaler_resource_;
  // Created lazily by MaybeInitializePixelLimitResource(); may be null.
  rtc::scoped_refptr<PixelLimitResource> pixel_limit_resource_;
  const rtc::scoped_refptr<BandwidthQualityScalerResource>
      bandwidth_quality_scaler_resource_;

  TaskQueueBase* encoder_queue_;
  VideoStreamInputStateProvider* const input_state_provider_
      RTC_GUARDED_BY(encoder_queue_);
  ResourceAdaptationProcessorInterface* adaptation_processor_;
  VideoStreamAdapter* stream_adapter_ RTC_GUARDED_BY(encoder_queue_);
  // Thread-safe.
  VideoStreamEncoderObserver* const encoder_stats_observer_;

  DegradationPreference degradation_preference_ RTC_GUARDED_BY(encoder_queue_);
  VideoSourceRestrictions video_source_restrictions_
      RTC_GUARDED_BY(encoder_queue_);

  VideoAdaptationCounters current_adaptation_counters_
      RTC_GUARDED_BY(encoder_queue_);

  const BalancedDegradationSettings balanced_settings_;
  Clock* clock_ RTC_GUARDED_BY(encoder_queue_);
  const bool experiment_cpu_load_estimator_ RTC_GUARDED_BY(encoder_queue_);
  const std::unique_ptr<InitialFrameDropper> initial_frame_dropper_
      RTC_GUARDED_BY(encoder_queue_);
  const bool quality_scaling_experiment_enabled_ RTC_GUARDED_BY(encoder_queue_);
  const bool pixel_limit_resource_experiment_enabled_
      RTC_GUARDED_BY(encoder_queue_);
  absl::optional<uint32_t> encoder_target_bitrate_bps_
      RTC_GUARDED_BY(encoder_queue_);
  absl::optional<VideoEncoder::RateControlParameters> encoder_rates_
      RTC_GUARDED_BY(encoder_queue_);
  std::unique_ptr<QualityRampUpExperimentHelper> quality_rampup_experiment_
      RTC_GUARDED_BY(encoder_queue_);
  absl::optional<EncoderSettings> encoder_settings_
      RTC_GUARDED_BY(encoder_queue_);

  // Ties a resource to a reason for statistical reporting. This AdaptReason is
  // also used by this module to make decisions about how to adapt up/down.
  std::map<rtc::scoped_refptr<Resource>, VideoAdaptationReason> resources_
      RTC_GUARDED_BY(encoder_queue_);
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // VIDEO_ADAPTATION_VIDEO_STREAM_ENCODER_RESOURCE_MANAGER_H_
|
||||
Loading…
Add table
Add a link
Reference in a new issue