Repo created
This commit is contained in:
parent
81b91f4139
commit
f8c34fa5ee
22732 changed files with 4815320 additions and 2 deletions
5
TMessagesProj/jni/voip/webrtc/api/video/OWNERS
Normal file
5
TMessagesProj/jni/voip/webrtc/api/video/OWNERS
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
brandtr@webrtc.org
|
||||
magjed@webrtc.org
|
||||
philipel@webrtc.org
|
||||
|
||||
per-file video_timing.h=ilnik@webrtc.org
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/builtin_video_bitrate_allocator_factory.h"
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "absl/base/attributes.h"
|
||||
#include "absl/base/macros.h"
|
||||
#include "api/video/video_bitrate_allocator.h"
|
||||
#include "api/video_codecs/video_codec.h"
|
||||
#include "modules/video_coding/svc/svc_rate_allocator.h"
|
||||
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
|
||||
class BuiltinVideoBitrateAllocatorFactory
|
||||
: public VideoBitrateAllocatorFactory {
|
||||
public:
|
||||
BuiltinVideoBitrateAllocatorFactory() = default;
|
||||
~BuiltinVideoBitrateAllocatorFactory() override = default;
|
||||
|
||||
std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
|
||||
const VideoCodec& codec) override {
|
||||
// TODO(https://crbug.com/webrtc/14884): Update SvcRateAllocator to
|
||||
// support simulcast and use it for VP9/AV1 simulcast as well.
|
||||
if ((codec.codecType == kVideoCodecAV1 ||
|
||||
codec.codecType == kVideoCodecVP9) &&
|
||||
codec.numberOfSimulcastStreams <= 1) {
|
||||
return std::make_unique<SvcRateAllocator>(codec);
|
||||
}
|
||||
return std::make_unique<SimulcastRateAllocator>(codec);
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace
|
||||
|
||||
std::unique_ptr<VideoBitrateAllocatorFactory>
|
||||
CreateBuiltinVideoBitrateAllocatorFactory() {
|
||||
return std::make_unique<BuiltinVideoBitrateAllocatorFactory>();
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
|
||||
#define API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/video/video_bitrate_allocator_factory.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
std::unique_ptr<VideoBitrateAllocatorFactory>
|
||||
CreateBuiltinVideoBitrateAllocatorFactory();
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
|
||||
263
TMessagesProj/jni/voip/webrtc/api/video/color_space.cc
Normal file
263
TMessagesProj/jni/voip/webrtc/api/video/color_space.cc
Normal file
|
|
@ -0,0 +1,263 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/color_space.h"
|
||||
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
// Try to convert `enum_value` into the enum class T. `enum_bitmask` has one
// bit set per valid enumerator (see CreateEnumBitmask below). On success,
// stores the converted value in `*out` and returns true; otherwise returns
// false and leaves `*out` untouched.
template <typename T>
bool SetFromUint8(uint8_t enum_value, uint64_t enum_bitmask, T* out) {
  // Values >= 64 cannot be represented in the 64-bit mask (and shifting by
  // them would be undefined behavior), so reject them up front.
  if (enum_value >= 64) {
    return false;
  }
  const bool is_known_enumerator = ((enum_bitmask >> enum_value) & 1) != 0;
  if (!is_known_enumerator) {
    return false;
  }
  *out = static_cast<T>(enum_value);
  return true;
}
|
||||
|
||||
// Compile-time guard for the constexpr MakeMask() below. It is deliberately
// NOT constexpr: referencing it from a constant expression — which happens
// exactly when an enum value of 64 or above shows up — makes the build fail.
// The bitmask and the code generating it would have to be extended if the
// standard is updated to include enum values >= 64.
int EnumMustBeLessThan64() {
  const int kGuardReturnValue = -1;
  return kGuardReturnValue;
}
|
||||
|
||||
// Recursively ORs (via +, the bits are disjoint per enumerator) one bit per
// enum value in `values`, covering `length` entries starting at `index`.
// Written as a single return statement so it is valid as a C++11 constexpr
// function.
// NOTE(review): the return type is `int` while the shift is computed in
// uint64_t, so the result silently truncates for enum values >= 31. All enums
// in this file stay well below that; confirm before reusing elsewhere.
template <typename T, size_t N>
constexpr int MakeMask(const int index, const int length, T (&values)[N]) {
  return length > 1
             ? (MakeMask(index, 1, values) +
                MakeMask(index + 1, length - 1, values))
             : (static_cast<uint8_t>(values[index]) < 64
                    // Set the bit corresponding to this enum value.
                    ? (uint64_t{1} << static_cast<uint8_t>(values[index]))
                    // Non-constexpr call: forces a build error when an enum
                    // value >= 64 is evaluated at compile time.
                    : EnumMustBeLessThan64());
}

// Create a bitmask where each bit corresponds to one potential enum value.
// `values` should be an array listing all possible enum values. The bit is set
// to one if the corresponding enum exists. Only works for enums with values
// less than 64.
template <typename T, size_t N>
constexpr uint64_t CreateEnumBitmask(T (&values)[N]) {
  return MakeMask(0, N, values);
}
|
||||
|
||||
// Decodes a raw uint8_t into a ChromaSiting, accepting only the enumerators
// listed in kChromaSitings. Returns false and leaves `*chroma_siting`
// untouched for any other value. Shared by the horizontal and vertical
// chroma-siting setters.
bool SetChromaSitingFromUint8(uint8_t enum_value,
                              ColorSpace::ChromaSiting* chroma_siting) {
  constexpr ColorSpace::ChromaSiting kChromaSitings[] = {
      ColorSpace::ChromaSiting::kUnspecified,
      ColorSpace::ChromaSiting::kCollocated, ColorSpace::ChromaSiting::kHalf};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kChromaSitings);

  return SetFromUint8(enum_value, enum_bitmask, chroma_siting);
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// The compiler-generated memberwise copy/move operations are correct for this
// class (enums plus an absl::optional member).
ColorSpace::ColorSpace() = default;
ColorSpace::ColorSpace(const ColorSpace& other) = default;
ColorSpace::ColorSpace(ColorSpace&& other) = default;
ColorSpace& ColorSpace::operator=(const ColorSpace& other) = default;

// Convenience constructor: chroma siting defaults to kUnspecified and no HDR
// metadata is attached; delegates to the full constructor below.
ColorSpace::ColorSpace(PrimaryID primaries,
                       TransferID transfer,
                       MatrixID matrix,
                       RangeID range)
    : ColorSpace(primaries,
                 transfer,
                 matrix,
                 range,
                 ChromaSiting::kUnspecified,
                 ChromaSiting::kUnspecified,
                 nullptr) {}

// Full constructor. `hdr_metadata` may be null; when non-null it is copied
// into the internally owned optional, so the caller keeps ownership of the
// pointee.
ColorSpace::ColorSpace(PrimaryID primaries,
                       TransferID transfer,
                       MatrixID matrix,
                       RangeID range,
                       ChromaSiting chroma_siting_horz,
                       ChromaSiting chroma_siting_vert,
                       const HdrMetadata* hdr_metadata)
    : primaries_(primaries),
      transfer_(transfer),
      matrix_(matrix),
      range_(range),
      chroma_siting_horizontal_(chroma_siting_horz),
      chroma_siting_vertical_(chroma_siting_vert),
      hdr_metadata_(hdr_metadata ? absl::make_optional(*hdr_metadata)
                                 : absl::nullopt) {}
|
||||
|
||||
// Trivial accessors for the color description fields.
ColorSpace::PrimaryID ColorSpace::primaries() const {
  return primaries_;
}

ColorSpace::TransferID ColorSpace::transfer() const {
  return transfer_;
}

ColorSpace::MatrixID ColorSpace::matrix() const {
  return matrix_;
}

ColorSpace::RangeID ColorSpace::range() const {
  return range_;
}

ColorSpace::ChromaSiting ColorSpace::chroma_siting_horizontal() const {
  return chroma_siting_horizontal_;
}

ColorSpace::ChromaSiting ColorSpace::chroma_siting_vertical() const {
  return chroma_siting_vertical_;
}

// Returns a pointer into the internally stored optional, or null when no HDR
// metadata is set. The pointer is only valid while this ColorSpace is alive
// and its metadata is not reassigned.
const HdrMetadata* ColorSpace::hdr_metadata() const {
  return hdr_metadata_ ? &*hdr_metadata_ : nullptr;
}
|
||||
|
||||
// Expands to one `case` that streams the enumerator's name (without the
// enum-class qualifier) into `ss`.
#define PRINT_ENUM_CASE(TYPE, NAME) \
  case TYPE::NAME:                  \
    ss << #NAME;                    \
    break;

// Renders the color space as a human-readable string, e.g.
// "{primaries:kBT709, transfer:kBT709, matrix:kBT709, range:kLimited}".
// Chroma siting and HDR metadata are not included in the output.
std::string ColorSpace::AsString() const {
  // Stack buffer is ample for the longest combination of enumerator names.
  char buf[1024];
  rtc::SimpleStringBuilder ss(buf);
  ss << "{primaries:";
  switch (primaries_) {
    PRINT_ENUM_CASE(PrimaryID, kBT709)
    PRINT_ENUM_CASE(PrimaryID, kUnspecified)
    PRINT_ENUM_CASE(PrimaryID, kBT470M)
    PRINT_ENUM_CASE(PrimaryID, kBT470BG)
    PRINT_ENUM_CASE(PrimaryID, kSMPTE170M)
    PRINT_ENUM_CASE(PrimaryID, kSMPTE240M)
    PRINT_ENUM_CASE(PrimaryID, kFILM)
    PRINT_ENUM_CASE(PrimaryID, kBT2020)
    PRINT_ENUM_CASE(PrimaryID, kSMPTEST428)
    PRINT_ENUM_CASE(PrimaryID, kSMPTEST431)
    PRINT_ENUM_CASE(PrimaryID, kSMPTEST432)
    PRINT_ENUM_CASE(PrimaryID, kJEDECP22)
  }
  ss << ", transfer:";
  switch (transfer_) {
    PRINT_ENUM_CASE(TransferID, kBT709)
    PRINT_ENUM_CASE(TransferID, kUnspecified)
    PRINT_ENUM_CASE(TransferID, kGAMMA22)
    PRINT_ENUM_CASE(TransferID, kGAMMA28)
    PRINT_ENUM_CASE(TransferID, kSMPTE170M)
    PRINT_ENUM_CASE(TransferID, kSMPTE240M)
    PRINT_ENUM_CASE(TransferID, kLINEAR)
    PRINT_ENUM_CASE(TransferID, kLOG)
    PRINT_ENUM_CASE(TransferID, kLOG_SQRT)
    PRINT_ENUM_CASE(TransferID, kIEC61966_2_4)
    PRINT_ENUM_CASE(TransferID, kBT1361_ECG)
    PRINT_ENUM_CASE(TransferID, kIEC61966_2_1)
    PRINT_ENUM_CASE(TransferID, kBT2020_10)
    PRINT_ENUM_CASE(TransferID, kBT2020_12)
    PRINT_ENUM_CASE(TransferID, kSMPTEST2084)
    PRINT_ENUM_CASE(TransferID, kSMPTEST428)
    PRINT_ENUM_CASE(TransferID, kARIB_STD_B67)
  }
  ss << ", matrix:";
  switch (matrix_) {
    PRINT_ENUM_CASE(MatrixID, kRGB)
    PRINT_ENUM_CASE(MatrixID, kBT709)
    PRINT_ENUM_CASE(MatrixID, kUnspecified)
    PRINT_ENUM_CASE(MatrixID, kFCC)
    PRINT_ENUM_CASE(MatrixID, kBT470BG)
    PRINT_ENUM_CASE(MatrixID, kSMPTE170M)
    PRINT_ENUM_CASE(MatrixID, kSMPTE240M)
    PRINT_ENUM_CASE(MatrixID, kYCOCG)
    PRINT_ENUM_CASE(MatrixID, kBT2020_NCL)
    PRINT_ENUM_CASE(MatrixID, kBT2020_CL)
    PRINT_ENUM_CASE(MatrixID, kSMPTE2085)
    PRINT_ENUM_CASE(MatrixID, kCDNCLS)
    PRINT_ENUM_CASE(MatrixID, kCDCLS)
    PRINT_ENUM_CASE(MatrixID, kBT2100_ICTCP)
  }

  ss << ", range:";
  switch (range_) {
    PRINT_ENUM_CASE(RangeID, kInvalid)
    PRINT_ENUM_CASE(RangeID, kLimited)
    PRINT_ENUM_CASE(RangeID, kFull)
    PRINT_ENUM_CASE(RangeID, kDerived)
  }
  ss << "}";
  return ss.str();
}

#undef PRINT_ENUM_CASE
|
||||
|
||||
// Decodes a raw uint8_t (e.g. a value received on the wire) into primaries_.
// Only enumerators listed in kPrimaryIds are accepted; any other value
// returns false and leaves the member unchanged. Keep kPrimaryIds in sync
// with PrimaryID in color_space.h.
bool ColorSpace::set_primaries_from_uint8(uint8_t enum_value) {
  constexpr PrimaryID kPrimaryIds[] = {
      PrimaryID::kBT709, PrimaryID::kUnspecified, PrimaryID::kBT470M,
      PrimaryID::kBT470BG, PrimaryID::kSMPTE170M, PrimaryID::kSMPTE240M,
      PrimaryID::kFILM, PrimaryID::kBT2020, PrimaryID::kSMPTEST428,
      PrimaryID::kSMPTEST431, PrimaryID::kSMPTEST432, PrimaryID::kJEDECP22};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kPrimaryIds);

  return SetFromUint8(enum_value, enum_bitmask, &primaries_);
}

// Same contract as set_primaries_from_uint8, for transfer_ / kTransferIds.
bool ColorSpace::set_transfer_from_uint8(uint8_t enum_value) {
  constexpr TransferID kTransferIds[] = {
      TransferID::kBT709, TransferID::kUnspecified,
      TransferID::kGAMMA22, TransferID::kGAMMA28,
      TransferID::kSMPTE170M, TransferID::kSMPTE240M,
      TransferID::kLINEAR, TransferID::kLOG,
      TransferID::kLOG_SQRT, TransferID::kIEC61966_2_4,
      TransferID::kBT1361_ECG, TransferID::kIEC61966_2_1,
      TransferID::kBT2020_10, TransferID::kBT2020_12,
      TransferID::kSMPTEST2084, TransferID::kSMPTEST428,
      TransferID::kARIB_STD_B67};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kTransferIds);

  return SetFromUint8(enum_value, enum_bitmask, &transfer_);
}

// Same contract, for matrix_ / kMatrixIds.
bool ColorSpace::set_matrix_from_uint8(uint8_t enum_value) {
  constexpr MatrixID kMatrixIds[] = {
      MatrixID::kRGB, MatrixID::kBT709, MatrixID::kUnspecified,
      MatrixID::kFCC, MatrixID::kBT470BG, MatrixID::kSMPTE170M,
      MatrixID::kSMPTE240M, MatrixID::kYCOCG, MatrixID::kBT2020_NCL,
      MatrixID::kBT2020_CL, MatrixID::kSMPTE2085, MatrixID::kCDNCLS,
      MatrixID::kCDCLS, MatrixID::kBT2100_ICTCP};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kMatrixIds);

  return SetFromUint8(enum_value, enum_bitmask, &matrix_);
}

// Same contract, for range_ / kRangeIds.
bool ColorSpace::set_range_from_uint8(uint8_t enum_value) {
  constexpr RangeID kRangeIds[] = {RangeID::kInvalid, RangeID::kLimited,
                                   RangeID::kFull, RangeID::kDerived};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kRangeIds);

  return SetFromUint8(enum_value, enum_bitmask, &range_);
}

// Horizontal and vertical chroma siting share one validator (kChromaSitings
// in the anonymous namespace above).
bool ColorSpace::set_chroma_siting_horizontal_from_uint8(uint8_t enum_value) {
  return SetChromaSitingFromUint8(enum_value, &chroma_siting_horizontal_);
}

bool ColorSpace::set_chroma_siting_vertical_from_uint8(uint8_t enum_value) {
  return SetChromaSitingFromUint8(enum_value, &chroma_siting_vertical_);
}

// Copies `hdr_metadata` into the internally owned optional; passing null
// clears any previously set metadata.
void ColorSpace::set_hdr_metadata(const HdrMetadata* hdr_metadata) {
  hdr_metadata_ =
      hdr_metadata ? absl::make_optional(*hdr_metadata) : absl::nullopt;
}
|
||||
|
||||
} // namespace webrtc
|
||||
181
TMessagesProj/jni/voip/webrtc/api/video/color_space.h
Normal file
181
TMessagesProj/jni/voip/webrtc/api/video/color_space.h
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_COLOR_SPACE_H_
|
||||
#define API_VIDEO_COLOR_SPACE_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/video/hdr_metadata.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// This class represents color information as specified in T-REC H.273,
|
||||
// available from https://www.itu.int/rec/T-REC-H.273.
|
||||
//
|
||||
// WebRTC's supported codecs:
|
||||
// - VP9 supports color profiles, see VP9 Bitstream & Decoding Process
|
||||
// Specification Version 0.6 Section 7.2.2 "Color config semantics" available
|
||||
// from https://www.webmproject.org.
|
||||
// - VP8 only supports BT.601, see
|
||||
// https://tools.ietf.org/html/rfc6386#section-9.2
|
||||
// - H264 uses the exact same representation as T-REC H.273. See T-REC-H.264
|
||||
// E.2.1, "VUI parameters semantics", available from
|
||||
// https://www.itu.int/rec/T-REC-H.264.
|
||||
|
||||
class RTC_EXPORT ColorSpace {
 public:
  enum class PrimaryID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 2.
    kBT709 = 1,
    kUnspecified = 2,
    kBT470M = 4,
    kBT470BG = 5,
    kSMPTE170M = 6,  // Identical to BT601
    kSMPTE240M = 7,
    kFILM = 8,
    kBT2020 = 9,
    kSMPTEST428 = 10,
    kSMPTEST431 = 11,
    kSMPTEST432 = 12,
    kJEDECP22 = 22,  // Identical to EBU3213-E
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kPrimaryIds.
  };

  enum class TransferID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 3.
    kBT709 = 1,
    kUnspecified = 2,
    kGAMMA22 = 4,
    kGAMMA28 = 5,
    kSMPTE170M = 6,
    kSMPTE240M = 7,
    kLINEAR = 8,
    kLOG = 9,
    kLOG_SQRT = 10,
    kIEC61966_2_4 = 11,
    kBT1361_ECG = 12,
    kIEC61966_2_1 = 13,
    kBT2020_10 = 14,
    kBT2020_12 = 15,
    kSMPTEST2084 = 16,
    kSMPTEST428 = 17,
    kARIB_STD_B67 = 18,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kTransferIds.
  };

  enum class MatrixID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 4.
    kRGB = 0,
    kBT709 = 1,
    kUnspecified = 2,
    kFCC = 4,
    kBT470BG = 5,
    kSMPTE170M = 6,
    kSMPTE240M = 7,
    kYCOCG = 8,
    kBT2020_NCL = 9,
    kBT2020_CL = 10,
    kSMPTE2085 = 11,
    kCDNCLS = 12,
    kCDCLS = 13,
    kBT2100_ICTCP = 14,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kMatrixIds.
  };

  enum class RangeID {
    // The indices are equal to the values specified at
    // https://www.webmproject.org/docs/container/#colour for the element Range.
    kInvalid = 0,
    // Limited Rec. 709 color range with RGB values ranging from 16 to 235.
    kLimited = 1,
    // Full RGB color range with RGB values from 0 to 255.
    kFull = 2,
    // Range is defined by MatrixCoefficients/TransferCharacteristics.
    kDerived = 3,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kRangeIds.
  };

  enum class ChromaSiting {
    // Chroma siting specifies how chroma is subsampled relative to the luma
    // samples in a YUV video frame.
    // The indices are equal to the values specified at
    // https://www.webmproject.org/docs/container/#colour for the element
    // ChromaSitingVert and ChromaSitingHorz.
    kUnspecified = 0,
    kCollocated = 1,
    kHalf = 2,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kChromaSitings.
  };

  ColorSpace();
  ColorSpace(const ColorSpace& other);
  ColorSpace(ColorSpace&& other);
  ColorSpace& operator=(const ColorSpace& other);
  // Convenience constructor: chroma siting defaults to kUnspecified and no
  // HDR metadata is attached.
  ColorSpace(PrimaryID primaries,
             TransferID transfer,
             MatrixID matrix,
             RangeID range);
  // Full constructor. `hdr_metadata` may be null; when non-null it is copied,
  // so the caller retains ownership of the pointee.
  ColorSpace(PrimaryID primaries,
             TransferID transfer,
             MatrixID matrix,
             RangeID range,
             ChromaSiting chroma_siting_horizontal,
             ChromaSiting chroma_siting_vertical,
             const HdrMetadata* hdr_metadata);
  // Two color spaces compare equal iff every field, including the optional
  // HDR metadata, compares equal.
  friend bool operator==(const ColorSpace& lhs, const ColorSpace& rhs) {
    return lhs.primaries_ == rhs.primaries_ && lhs.transfer_ == rhs.transfer_ &&
           lhs.matrix_ == rhs.matrix_ && lhs.range_ == rhs.range_ &&
           lhs.chroma_siting_horizontal_ == rhs.chroma_siting_horizontal_ &&
           lhs.chroma_siting_vertical_ == rhs.chroma_siting_vertical_ &&
           lhs.hdr_metadata_ == rhs.hdr_metadata_;
  }
  friend bool operator!=(const ColorSpace& lhs, const ColorSpace& rhs) {
    return !(lhs == rhs);
  }

  PrimaryID primaries() const;
  TransferID transfer() const;
  MatrixID matrix() const;
  RangeID range() const;
  ChromaSiting chroma_siting_horizontal() const;
  ChromaSiting chroma_siting_vertical() const;
  // Returns null when no HDR metadata is set.
  const HdrMetadata* hdr_metadata() const;
  // Human-readable representation, intended for logging/debugging.
  std::string AsString() const;

  // The set_*_from_uint8 setters validate a raw value (e.g. parsed off the
  // wire) against the corresponding enum; they return false and leave the
  // member unchanged if the value is not a known enumerator.
  bool set_primaries_from_uint8(uint8_t enum_value);
  bool set_transfer_from_uint8(uint8_t enum_value);
  bool set_matrix_from_uint8(uint8_t enum_value);
  bool set_range_from_uint8(uint8_t enum_value);
  bool set_chroma_siting_horizontal_from_uint8(uint8_t enum_value);
  bool set_chroma_siting_vertical_from_uint8(uint8_t enum_value);
  // Passing null clears any previously set metadata; non-null is copied.
  void set_hdr_metadata(const HdrMetadata* hdr_metadata);

 private:
  PrimaryID primaries_ = PrimaryID::kUnspecified;
  TransferID transfer_ = TransferID::kUnspecified;
  MatrixID matrix_ = MatrixID::kUnspecified;
  RangeID range_ = RangeID::kInvalid;
  ChromaSiting chroma_siting_horizontal_ = ChromaSiting::kUnspecified;
  ChromaSiting chroma_siting_vertical_ = ChromaSiting::kUnspecified;
  absl::optional<HdrMetadata> hdr_metadata_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
#endif // API_VIDEO_COLOR_SPACE_H_
|
||||
134
TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc
Normal file
134
TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc
Normal file
|
|
@ -0,0 +1,134 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/encoded_frame.h"
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_video_header.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Converts ReceivedTime() (milliseconds; negative means "no receive time",
// the base-class default is -1) into an optional Timestamp.
absl::optional<Timestamp> EncodedFrame::ReceivedTimestamp() const {
  const int64_t received_time_ms = ReceivedTime();
  if (received_time_ms < 0) {
    return absl::nullopt;
  }
  return absl::make_optional(Timestamp::Millis(received_time_ms));
}
|
||||
|
||||
// Converts RenderTimeMs() (milliseconds; negative means "no render time")
// into an optional Timestamp.
absl::optional<Timestamp> EncodedFrame::RenderTimestamp() const {
  const int64_t render_time_ms = RenderTimeMs();
  if (render_time_ms < 0) {
    return absl::nullopt;
  }
  return absl::make_optional(Timestamp::Millis(render_time_ms));
}
|
||||
|
||||
// Base-class default: assume the frame was not delayed by retransmission.
// Subclasses with transport-level knowledge override this; the value is
// consumed by the timing calculation (see the note in the header).
bool EncodedFrame::delayed_by_retransmission() const {
  return false;
}
|
||||
|
||||
// Populates _codecSpecificInfo from the RTP video header of a packet
// belonging to this frame. Judging by the "first packet" branches below,
// this may be called once per packet: defaults are initialized only when
// the stored codec type changes, and later calls merge additional fields.
// `header` may be null, in which case nothing is done.
void EncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header) {
  if (header) {
    switch (header->codec) {
      case kVideoCodecVP8: {
        const auto& vp8_header =
            absl::get<RTPVideoHeaderVP8>(header->video_type_header);
        if (_codecSpecificInfo.codecType != kVideoCodecVP8) {
          // This is the first packet for this frame.
          _codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0;
          _codecSpecificInfo.codecSpecific.VP8.layerSync = false;
          _codecSpecificInfo.codecSpecific.VP8.keyIdx = -1;
          _codecSpecificInfo.codecType = kVideoCodecVP8;
        }
        _codecSpecificInfo.codecSpecific.VP8.nonReference =
            vp8_header.nonReference;
        // Copy temporal-layer info only when it is actually signaled.
        if (vp8_header.temporalIdx != kNoTemporalIdx) {
          _codecSpecificInfo.codecSpecific.VP8.temporalIdx =
              vp8_header.temporalIdx;
          _codecSpecificInfo.codecSpecific.VP8.layerSync = vp8_header.layerSync;
        }
        if (vp8_header.keyIdx != kNoKeyIdx) {
          _codecSpecificInfo.codecSpecific.VP8.keyIdx = vp8_header.keyIdx;
        }
        break;
      }
      case kVideoCodecVP9: {
        const auto& vp9_header =
            absl::get<RTPVideoHeaderVP9>(header->video_type_header);
        if (_codecSpecificInfo.codecType != kVideoCodecVP9) {
          // This is the first packet for this frame.
          _codecSpecificInfo.codecSpecific.VP9.temporal_idx = 0;
          _codecSpecificInfo.codecSpecific.VP9.gof_idx = 0;
          _codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted = false;
          _codecSpecificInfo.codecType = kVideoCodecVP9;
        }
        _codecSpecificInfo.codecSpecific.VP9.inter_pic_predicted =
            vp9_header.inter_pic_predicted;
        _codecSpecificInfo.codecSpecific.VP9.flexible_mode =
            vp9_header.flexible_mode;
        _codecSpecificInfo.codecSpecific.VP9.num_ref_pics =
            vp9_header.num_ref_pics;
        for (uint8_t r = 0; r < vp9_header.num_ref_pics; ++r) {
          _codecSpecificInfo.codecSpecific.VP9.p_diff[r] =
              vp9_header.pid_diff[r];
        }
        _codecSpecificInfo.codecSpecific.VP9.ss_data_available =
            vp9_header.ss_data_available;
        // Temporal / spatial / GOF fields are optional in the header; copy
        // each only when signaled.
        if (vp9_header.temporal_idx != kNoTemporalIdx) {
          _codecSpecificInfo.codecSpecific.VP9.temporal_idx =
              vp9_header.temporal_idx;
          _codecSpecificInfo.codecSpecific.VP9.temporal_up_switch =
              vp9_header.temporal_up_switch;
        }
        if (vp9_header.spatial_idx != kNoSpatialIdx) {
          _codecSpecificInfo.codecSpecific.VP9.inter_layer_predicted =
              vp9_header.inter_layer_predicted;
          SetSpatialIndex(vp9_header.spatial_idx);
        }
        if (vp9_header.gof_idx != kNoGofIdx) {
          _codecSpecificInfo.codecSpecific.VP9.gof_idx = vp9_header.gof_idx;
        }
        // Scalability structure (per-layer resolutions + GOF) is only present
        // on some packets.
        if (vp9_header.ss_data_available) {
          _codecSpecificInfo.codecSpecific.VP9.num_spatial_layers =
              vp9_header.num_spatial_layers;
          _codecSpecificInfo.codecSpecific.VP9
              .spatial_layer_resolution_present =
              vp9_header.spatial_layer_resolution_present;
          if (vp9_header.spatial_layer_resolution_present) {
            for (size_t i = 0; i < vp9_header.num_spatial_layers; ++i) {
              _codecSpecificInfo.codecSpecific.VP9.width[i] =
                  vp9_header.width[i];
              _codecSpecificInfo.codecSpecific.VP9.height[i] =
                  vp9_header.height[i];
            }
          }
          _codecSpecificInfo.codecSpecific.VP9.gof.CopyGofInfoVP9(
              vp9_header.gof);
        }
        break;
      }
      // For the remaining codecs only the codec type is recorded.
      case kVideoCodecH264: {
        _codecSpecificInfo.codecType = kVideoCodecH264;
        break;
      }
      case kVideoCodecH265: {
        _codecSpecificInfo.codecType = kVideoCodecH265;
        break;
      }
      case kVideoCodecAV1: {
        _codecSpecificInfo.codecType = kVideoCodecAV1;
        break;
      }
      default: {
        _codecSpecificInfo.codecType = kVideoCodecGeneric;
        break;
      }
    }
  }
}
|
||||
|
||||
} // namespace webrtc
|
||||
106
TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h
Normal file
106
TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.h
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
/*
|
||||
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_ENCODED_FRAME_H_
|
||||
#define API_VIDEO_ENCODED_FRAME_H_
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdint.h>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/units/timestamp.h"
|
||||
#include "api/video/encoded_image.h"
|
||||
#include "api/video/video_codec_type.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_video_header.h"
|
||||
#include "modules/video_coding/include/video_codec_interface.h"
|
||||
#include "modules/video_coding/include/video_coding_defines.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// TODO(philipel): Move transport specific info out of EncodedFrame.
|
||||
// NOTE: This class is still under development and may change without notice.
|
||||
class EncodedFrame : public EncodedImage {
 public:
  // Upper bound on the number of entries in `references` below.
  static const uint8_t kMaxFrameReferences = 5;

  EncodedFrame() = default;
  EncodedFrame(const EncodedFrame&) = default;
  virtual ~EncodedFrame() {}

  // When this frame was received. Base default of -1 means "unknown".
  // TODO(bugs.webrtc.org/13756): Use Timestamp instead of int.
  virtual int64_t ReceivedTime() const { return -1; }
  // Returns a Timestamp from `ReceivedTime`, or nullopt if there is no receive
  // time.
  absl::optional<webrtc::Timestamp> ReceivedTimestamp() const;

  // When this frame should be rendered.
  // TODO(bugs.webrtc.org/13756): Use Timestamp instead of int.
  virtual int64_t RenderTime() const { return _renderTimeMs; }
  // TODO(bugs.webrtc.org/13756): Migrate to ReceivedTimestamp.
  int64_t RenderTimeMs() const { return _renderTimeMs; }
  // Returns a Timestamp from `RenderTime`, or nullopt if there is no
  // render time.
  absl::optional<webrtc::Timestamp> RenderTimestamp() const;

  // This information is currently needed by the timing calculation class.
  // TODO(philipel): Remove this function when a new timing class has
  // been implemented.
  virtual bool delayed_by_retransmission() const;

  // A frame with no references is a keyframe by definition.
  bool is_keyframe() const { return num_references == 0; }

  // Frame ID, determined from RTP-level information (see id_ below).
  void SetId(int64_t id) { id_ = id; }
  int64_t Id() const { return id_; }

  uint8_t PayloadType() const { return _payloadType; }

  void SetRenderTime(const int64_t renderTimeMs) {
    _renderTimeMs = renderTimeMs;
  }

  // Upcast view of this frame as its EncodedImage base.
  const webrtc::EncodedImage& EncodedImage() const {
    return static_cast<const webrtc::EncodedImage&>(*this);
  }

  const CodecSpecificInfo* CodecSpecific() const { return &_codecSpecificInfo; }
  void SetCodecSpecific(const CodecSpecificInfo* codec_specific) {
    _codecSpecificInfo = *codec_specific;
  }

  // TODO(philipel): Add simple modify/access functions to prevent adding too
  // many `references`.
  size_t num_references = 0;
  // IDs of the frames this frame depends on; only the first `num_references`
  // entries are meaningful. NOTE(review): the array itself is never
  // zero-initialized here, so unused slots hold indeterminate values —
  // confirm all readers respect num_references.
  int64_t references[kMaxFrameReferences];
  // Is this subframe the last one in the superframe (In RTP stream that would
  // mean that the last packet has a marker bit set).
  bool is_last_spatial_layer = true;

 protected:
  // TODO(https://bugs.webrtc.org/9378): Move RTP specifics down into a
  // transport-aware subclass, eg RtpFrameObject.
  void CopyCodecSpecific(const RTPVideoHeader* header);

  // TODO(https://bugs.webrtc.org/9378): Make fields private with
  // getters/setters as needed.
  int64_t _renderTimeMs = -1;
  uint8_t _payloadType = 0;
  CodecSpecificInfo _codecSpecificInfo;
  VideoCodecType _codec = kVideoCodecGeneric;

 private:
  // The ID of the frame is determined from RTP level information. The IDs are
  // used to describe order and dependencies between frames.
  int64_t id_ = -1;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_ENCODED_FRAME_H_
|
||||
104
TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc
Normal file
104
TMessagesProj/jni/voip/webrtc/api/video/encoded_image.cc
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/encoded_image.h"
|
||||
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Allocates an uninitialized buffer of `size` bytes.
// NOTE(review): malloc's result is not checked; on allocation failure
// buffer_ is null — presumably callers treat OOM as fatal; confirm.
EncodedImageBuffer::EncodedImageBuffer(size_t size) : size_(size) {
  buffer_ = static_cast<uint8_t*>(malloc(size));
}

// Allocates a buffer of `size` bytes and copies `data` into it.
EncodedImageBuffer::EncodedImageBuffer(const uint8_t* data, size_t size)
    : EncodedImageBuffer(size) {
  memcpy(buffer_, data, size);
}

EncodedImageBuffer::~EncodedImageBuffer() {
  free(buffer_);
}

// static
rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(size_t size) {
  return rtc::make_ref_counted<EncodedImageBuffer>(size);
}
// static
rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(
    const uint8_t* data,
    size_t size) {
  return rtc::make_ref_counted<EncodedImageBuffer>(data, size);
}

const uint8_t* EncodedImageBuffer::data() const {
  return buffer_;
}
uint8_t* EncodedImageBuffer::data() {
  return buffer_;
}
size_t EncodedImageBuffer::size() const {
  return size_;
}

// Resizes the buffer in place, preserving the common prefix of the old and
// new contents.
// NOTE(review): assigning realloc's result straight to buffer_ leaks the old
// block if realloc fails — consistent with the unchecked malloc above, but
// worth confirming OOM is considered fatal here.
void EncodedImageBuffer::Realloc(size_t size) {
  // Calling realloc with size == 0 is equivalent to free, and returns nullptr.
  // Which is confusing on systems where malloc(0) doesn't return a nullptr.
  // More specifically, it breaks expectations of
  // VCMSessionInfo::UpdateDataPointers.
  RTC_DCHECK(size > 0);
  buffer_ = static_cast<uint8_t*>(realloc(buffer_, size));
  size_ = size;
}
|
||||
|
||||
// EncodedImage owns no raw resources (buffer ownership is via ref-counted
// encoded_data_), so all special members can be defaulted (Rule of Zero).
EncodedImage::EncodedImage() = default;

EncodedImage::EncodedImage(EncodedImage&&) = default;
EncodedImage::EncodedImage(const EncodedImage&) = default;

EncodedImage::~EncodedImage() = default;

EncodedImage& EncodedImage::operator=(EncodedImage&&) = default;
EncodedImage& EncodedImage::operator=(const EncodedImage&) = default;

// Records when encoding of this frame started and finished, in milliseconds,
// into the embedded Timing struct.
void EncodedImage::SetEncodeTime(int64_t encode_start_ms,
                                 int64_t encode_finish_ms) {
  timing_.encode_start_ms = encode_start_ms;
  timing_.encode_finish_ms = encode_finish_ms;
}

// Capture time in the local clock. capture_time_ms_ <= 0 is treated as
// "unknown" and reported as minus infinity.
webrtc::Timestamp EncodedImage::CaptureTime() const {
  return capture_time_ms_ > 0 ? Timestamp::Millis(capture_time_ms_)
                              : Timestamp::MinusInfinity();
}
|
||||
|
||||
absl::optional<size_t> EncodedImage::SpatialLayerFrameSize(
|
||||
int spatial_index) const {
|
||||
RTC_DCHECK_GE(spatial_index, 0);
|
||||
RTC_DCHECK_LE(spatial_index, spatial_index_.value_or(0));
|
||||
|
||||
auto it = spatial_layer_frame_size_bytes_.find(spatial_index);
|
||||
if (it == spatial_layer_frame_size_bytes_.end()) {
|
||||
return absl::nullopt;
|
||||
}
|
||||
|
||||
return it->second;
|
||||
}
|
||||
|
||||
void EncodedImage::SetSpatialLayerFrameSize(int spatial_index,
|
||||
size_t size_bytes) {
|
||||
RTC_DCHECK_GE(spatial_index, 0);
|
||||
RTC_DCHECK_LE(spatial_index, spatial_index_.value_or(0));
|
||||
RTC_DCHECK_GE(size_bytes, 0);
|
||||
spatial_layer_frame_size_bytes_[spatial_index] = size_bytes;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
267
TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h
Normal file
267
TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h
Normal file
|
|
@ -0,0 +1,267 @@
|
|||
/*
|
||||
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_ENCODED_IMAGE_H_
|
||||
#define API_VIDEO_ENCODED_IMAGE_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <map>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/rtp_packet_infos.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/units/timestamp.h"
|
||||
#include "api/video/color_space.h"
|
||||
#include "api/video/video_codec_constants.h"
|
||||
#include "api/video/video_content_type.h"
|
||||
#include "api/video/video_frame_type.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "api/video/video_timing.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/ref_count.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Abstract interface for buffer storage. Intended to support buffers owned by
|
||||
// external encoders with special release requirements, e.g, java encoders with
|
||||
// releaseOutputBuffer.
|
||||
class EncodedImageBufferInterface : public rtc::RefCountInterface {
 public:
  // Read-only access to the encoded bytes.
  virtual const uint8_t* data() const = 0;
  // TODO(bugs.webrtc.org/9378): Make interface essentially read-only, delete
  // this non-const data method.
  virtual uint8_t* data() = 0;
  // Number of valid bytes reachable through data().
  virtual size_t size() const = 0;
};
|
||||
|
||||
// Basic implementation of EncodedImageBufferInterface.
|
||||
class RTC_EXPORT EncodedImageBuffer : public EncodedImageBufferInterface {
 public:
  // Convenience factory for an empty (zero-byte) buffer.
  static rtc::scoped_refptr<EncodedImageBuffer> Create() { return Create(0); }
  // Creates a buffer of `size` uninitialized bytes.
  static rtc::scoped_refptr<EncodedImageBuffer> Create(size_t size);
  // Creates a buffer holding a copy of the `size` bytes at `data`.
  static rtc::scoped_refptr<EncodedImageBuffer> Create(const uint8_t* data,
                                                       size_t size);

  const uint8_t* data() const override;
  uint8_t* data() override;
  size_t size() const override;
  // Resizes the buffer in place, preserving existing contents.
  void Realloc(size_t t);

 protected:
  // Constructors are protected: instances must be created via the ref-counted
  // Create() factories.
  explicit EncodedImageBuffer(size_t size);
  EncodedImageBuffer(const uint8_t* data, size_t size);
  ~EncodedImageBuffer();

  // Allocation size in bytes.
  size_t size_;
  // malloc-owned storage, released in the destructor.
  uint8_t* buffer_;
};
|
||||
|
||||
// TODO(bug.webrtc.org/9378): This is a legacy api class, which is slowly being
|
||||
// cleaned up. Direct use of its members is strongly discouraged.
|
||||
class RTC_EXPORT EncodedImage {
|
||||
public:
|
||||
EncodedImage();
|
||||
EncodedImage(EncodedImage&&);
|
||||
EncodedImage(const EncodedImage&);
|
||||
|
||||
~EncodedImage();
|
||||
|
||||
EncodedImage& operator=(EncodedImage&&);
|
||||
EncodedImage& operator=(const EncodedImage&);
|
||||
|
||||
// Frame capture time in RTP timestamp representation (90kHz).
|
||||
void SetRtpTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }
|
||||
uint32_t RtpTimestamp() const { return timestamp_rtp_; }
|
||||
|
||||
void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms);
|
||||
|
||||
// Frame capture time in local time.
|
||||
Timestamp CaptureTime() const;
|
||||
|
||||
// Frame capture time in ntp epoch time, i.e. time since 1st Jan 1900
|
||||
int64_t NtpTimeMs() const { return ntp_time_ms_; }
|
||||
|
||||
// Every simulcast layer (= encoding) has its own encoder and RTP stream.
|
||||
// There can be no dependencies between different simulcast layers.
|
||||
absl::optional<int> SimulcastIndex() const { return simulcast_index_; }
|
||||
void SetSimulcastIndex(absl::optional<int> simulcast_index) {
|
||||
RTC_DCHECK_GE(simulcast_index.value_or(0), 0);
|
||||
RTC_DCHECK_LT(simulcast_index.value_or(0), kMaxSimulcastStreams);
|
||||
simulcast_index_ = simulcast_index;
|
||||
}
|
||||
|
||||
const absl::optional<Timestamp>& CaptureTimeIdentifier() const {
|
||||
return capture_time_identifier_;
|
||||
}
|
||||
void SetCaptureTimeIdentifier(
|
||||
const absl::optional<Timestamp>& capture_time_identifier) {
|
||||
capture_time_identifier_ = capture_time_identifier;
|
||||
}
|
||||
|
||||
// Encoded images can have dependencies between spatial and/or temporal
|
||||
// layers, depending on the scalability mode used by the encoder. See diagrams
|
||||
// at https://w3c.github.io/webrtc-svc/#dependencydiagrams*.
|
||||
absl::optional<int> SpatialIndex() const { return spatial_index_; }
|
||||
void SetSpatialIndex(absl::optional<int> spatial_index) {
|
||||
RTC_DCHECK_GE(spatial_index.value_or(0), 0);
|
||||
RTC_DCHECK_LT(spatial_index.value_or(0), kMaxSpatialLayers);
|
||||
spatial_index_ = spatial_index;
|
||||
}
|
||||
|
||||
absl::optional<int> TemporalIndex() const { return temporal_index_; }
|
||||
void SetTemporalIndex(absl::optional<int> temporal_index) {
|
||||
RTC_DCHECK_GE(temporal_index_.value_or(0), 0);
|
||||
RTC_DCHECK_LT(temporal_index_.value_or(0), kMaxTemporalStreams);
|
||||
temporal_index_ = temporal_index;
|
||||
}
|
||||
|
||||
// These methods can be used to set/get size of subframe with spatial index
|
||||
// `spatial_index` on encoded frames that consist of multiple spatial layers.
|
||||
absl::optional<size_t> SpatialLayerFrameSize(int spatial_index) const;
|
||||
void SetSpatialLayerFrameSize(int spatial_index, size_t size_bytes);
|
||||
|
||||
const webrtc::ColorSpace* ColorSpace() const {
|
||||
return color_space_ ? &*color_space_ : nullptr;
|
||||
}
|
||||
void SetColorSpace(const absl::optional<webrtc::ColorSpace>& color_space) {
|
||||
color_space_ = color_space;
|
||||
}
|
||||
|
||||
absl::optional<VideoPlayoutDelay> PlayoutDelay() const {
|
||||
return playout_delay_;
|
||||
}
|
||||
|
||||
void SetPlayoutDelay(absl::optional<VideoPlayoutDelay> playout_delay) {
|
||||
playout_delay_ = playout_delay;
|
||||
}
|
||||
|
||||
// These methods along with the private member video_frame_tracking_id_ are
|
||||
// meant for media quality testing purpose only.
|
||||
absl::optional<uint16_t> VideoFrameTrackingId() const {
|
||||
return video_frame_tracking_id_;
|
||||
}
|
||||
void SetVideoFrameTrackingId(absl::optional<uint16_t> tracking_id) {
|
||||
video_frame_tracking_id_ = tracking_id;
|
||||
}
|
||||
|
||||
const RtpPacketInfos& PacketInfos() const { return packet_infos_; }
|
||||
void SetPacketInfos(RtpPacketInfos packet_infos) {
|
||||
packet_infos_ = std::move(packet_infos);
|
||||
}
|
||||
|
||||
bool RetransmissionAllowed() const { return retransmission_allowed_; }
|
||||
void SetRetransmissionAllowed(bool retransmission_allowed) {
|
||||
retransmission_allowed_ = retransmission_allowed;
|
||||
}
|
||||
|
||||
size_t size() const { return size_; }
|
||||
void set_size(size_t new_size) {
|
||||
// Allow set_size(0) even if we have no buffer.
|
||||
RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity());
|
||||
size_ = new_size;
|
||||
}
|
||||
|
||||
void SetEncodedData(
|
||||
rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data) {
|
||||
encoded_data_ = encoded_data;
|
||||
size_ = encoded_data->size();
|
||||
}
|
||||
|
||||
void ClearEncodedData() {
|
||||
encoded_data_ = nullptr;
|
||||
size_ = 0;
|
||||
}
|
||||
|
||||
rtc::scoped_refptr<EncodedImageBufferInterface> GetEncodedData() const {
|
||||
return encoded_data_;
|
||||
}
|
||||
|
||||
const uint8_t* data() const {
|
||||
return encoded_data_ ? encoded_data_->data() : nullptr;
|
||||
}
|
||||
|
||||
// Returns whether the encoded image can be considered to be of target
|
||||
// quality.
|
||||
bool IsAtTargetQuality() const { return at_target_quality_; }
|
||||
|
||||
// Sets that the encoded image can be considered to be of target quality to
|
||||
// true or false.
|
||||
void SetAtTargetQuality(bool at_target_quality) {
|
||||
at_target_quality_ = at_target_quality;
|
||||
}
|
||||
|
||||
webrtc::VideoFrameType FrameType() const { return _frameType; }
|
||||
|
||||
void SetFrameType(webrtc::VideoFrameType frame_type) {
|
||||
_frameType = frame_type;
|
||||
}
|
||||
VideoContentType contentType() const { return content_type_; }
|
||||
VideoRotation rotation() const { return rotation_; }
|
||||
|
||||
uint32_t _encodedWidth = 0;
|
||||
uint32_t _encodedHeight = 0;
|
||||
// NTP time of the capture time in local timebase in milliseconds.
|
||||
// TODO(minyue): make this member private.
|
||||
int64_t ntp_time_ms_ = 0;
|
||||
int64_t capture_time_ms_ = 0;
|
||||
VideoFrameType _frameType = VideoFrameType::kVideoFrameDelta;
|
||||
VideoRotation rotation_ = kVideoRotation_0;
|
||||
VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
|
||||
int qp_ = -1; // Quantizer value.
|
||||
|
||||
struct Timing {
|
||||
uint8_t flags = VideoSendTiming::kInvalid;
|
||||
int64_t encode_start_ms = 0;
|
||||
int64_t encode_finish_ms = 0;
|
||||
int64_t packetization_finish_ms = 0;
|
||||
int64_t pacer_exit_ms = 0;
|
||||
int64_t network_timestamp_ms = 0;
|
||||
int64_t network2_timestamp_ms = 0;
|
||||
int64_t receive_start_ms = 0;
|
||||
int64_t receive_finish_ms = 0;
|
||||
} timing_;
|
||||
EncodedImage::Timing video_timing() const { return timing_; }
|
||||
EncodedImage::Timing* video_timing_mutable() { return &timing_; }
|
||||
|
||||
private:
|
||||
size_t capacity() const { return encoded_data_ ? encoded_data_->size() : 0; }
|
||||
|
||||
// When set, indicates that all future frames will be constrained with those
|
||||
// limits until the application indicates a change again.
|
||||
absl::optional<VideoPlayoutDelay> playout_delay_;
|
||||
|
||||
rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data_;
|
||||
size_t size_ = 0; // Size of encoded frame data.
|
||||
uint32_t timestamp_rtp_ = 0;
|
||||
absl::optional<int> simulcast_index_;
|
||||
absl::optional<Timestamp> capture_time_identifier_;
|
||||
absl::optional<int> spatial_index_;
|
||||
absl::optional<int> temporal_index_;
|
||||
std::map<int, size_t> spatial_layer_frame_size_bytes_;
|
||||
absl::optional<webrtc::ColorSpace> color_space_;
|
||||
// This field is meant for media quality testing purpose only. When enabled it
|
||||
// carries the webrtc::VideoFrame id field from the sender to the receiver.
|
||||
absl::optional<uint16_t> video_frame_tracking_id_;
|
||||
// Information about packets used to assemble this video frame. This is needed
|
||||
// by `SourceTracker` when the frame is delivered to the RTCRtpReceiver's
|
||||
// MediaStreamTrack, in order to implement getContributingSources(). See:
|
||||
// https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources
|
||||
RtpPacketInfos packet_infos_;
|
||||
bool retransmission_allowed_ = true;
|
||||
// True if the encoded image can be considered to be of target quality.
|
||||
bool at_target_quality_ = false;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_ENCODED_IMAGE_H_
|
||||
280
TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.cc
Normal file
280
TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.cc
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/frame_buffer.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include "absl/algorithm/container.h"
|
||||
#include "absl/container/inlined_vector.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/numerics/sequence_number_util.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
bool ValidReferences(const EncodedFrame& frame) {
|
||||
// All references must point backwards, and duplicates are not allowed.
|
||||
for (size_t i = 0; i < frame.num_references; ++i) {
|
||||
if (frame.references[i] >= frame.Id())
|
||||
return false;
|
||||
|
||||
for (size_t j = i + 1; j < frame.num_references; ++j) {
|
||||
if (frame.references[i] == frame.references[j])
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// Since FrameBuffer::FrameInfo is private it can't be used in the function
|
||||
// signature, hence the FrameIteratorT type.
|
||||
// Accessors over FrameMap iterators. Since FrameBuffer::FrameInfo is private
// it can't be used in the function signature, hence the FrameIteratorT type.

// View over the frame's reference IDs, clamped to kMaxFrameReferences.
template <typename FrameIteratorT>
rtc::ArrayView<const int64_t> GetReferences(const FrameIteratorT& it) {
  return {it->second.encoded_frame->references,
          std::min<size_t>(it->second.encoded_frame->num_references,
                           EncodedFrame::kMaxFrameReferences)};
}

// The frame ID is the FrameMap key.
template <typename FrameIteratorT>
int64_t GetFrameId(const FrameIteratorT& it) {
  return it->first;
}

// RTP timestamp of the frame; frames sharing a timestamp form one temporal
// unit.
template <typename FrameIteratorT>
uint32_t GetTimestamp(const FrameIteratorT& it) {
  return it->second.encoded_frame->RtpTimestamp();
}

// True for the frame that closes its temporal unit (last spatial layer).
template <typename FrameIteratorT>
bool IsLastFrameInTemporalUnit(const FrameIteratorT& it) {
  return it->second.encoded_frame->is_last_spatial_layer;
}
|
||||
} // namespace
|
||||
|
||||
// `max_size` caps the number of buffered frames; `max_decode_history` bounds
// how far back decoded-frame bookkeeping is kept. The legacy frame-ID-jump
// behavior is on unless the field trial explicitly disables it.
FrameBuffer::FrameBuffer(int max_size,
                         int max_decode_history,
                         const FieldTrialsView& field_trials)
    : legacy_frame_id_jump_behavior_(
          !field_trials.IsDisabled("WebRTC-LegacyFrameIdJumpBehavior")),
      max_size_(max_size),
      decoded_frame_history_(max_decode_history) {}
|
||||
|
||||
// Inserts `frame` into the buffer, rejecting invalid/duplicate/stale frames.
// A keyframe may clear the buffer in two recovery situations: a frame-ID jump
// (legacy behavior) or a full buffer. Returns true iff the frame was stored.
bool FrameBuffer::InsertFrame(std::unique_ptr<EncodedFrame> frame) {
  if (!ValidReferences(*frame)) {
    RTC_DLOG(LS_WARNING) << "Frame " << frame->Id()
                         << " has invalid references, dropping frame.";
    return false;
  }

  if (frame->Id() <= decoded_frame_history_.GetLastDecodedFrameId()) {
    // The frame ID is not newer than what was already decoded. A keyframe
    // whose RTP timestamp is nevertheless newer indicates the sender restarted
    // its frame-ID sequence; recover by clearing the buffer.
    if (legacy_frame_id_jump_behavior_ && frame->is_keyframe() &&
        AheadOf(frame->RtpTimestamp(),
                *decoded_frame_history_.GetLastDecodedFrameTimestamp())) {
      RTC_DLOG(LS_WARNING)
          << "Keyframe " << frame->Id()
          << " has newer timestamp but older picture id, clearing buffer.";
      Clear();
    } else {
      // Already decoded past this frame.
      return false;
    }
  }

  if (frames_.size() == max_size_) {
    if (frame->is_keyframe()) {
      // A keyframe restarts decodability, so dropping everything is safe.
      RTC_DLOG(LS_WARNING) << "Keyframe " << frame->Id()
                           << " inserted into full buffer, clearing buffer.";
      Clear();
    } else {
      // No space for this frame.
      return false;
    }
  }

  // Read the ID before std::move() invalidates `frame`.
  const int64_t frame_id = frame->Id();
  auto insert_res = frames_.emplace(frame_id, FrameInfo{std::move(frame)});
  if (!insert_res.second) {
    // Frame has already been inserted.
    return false;
  }

  if (frames_.size() == max_size_) {
    RTC_DLOG(LS_WARNING) << "Frame " << frame_id
                         << " inserted, buffer is now full.";
  }

  // Update continuity/decodability bookkeeping for this and later frames.
  PropagateContinuity(insert_res.first);
  FindNextAndLastDecodableTemporalUnit();
  return true;
}
|
||||
|
||||
// Removes and returns all frames of the next decodable temporal unit, marking
// each one as decoded. Returns an empty vector when nothing is decodable.
absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4>
FrameBuffer::ExtractNextDecodableTemporalUnit() {
  absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4> res;
  if (!next_decodable_temporal_unit_) {
    return res;
  }

  // `last_frame` is inclusive, so iterate one past it.
  auto end_it = std::next(next_decodable_temporal_unit_->last_frame);
  for (auto it = next_decodable_temporal_unit_->first_frame; it != end_it;
       ++it) {
    decoded_frame_history_.InsertDecoded(GetFrameId(it), GetTimestamp(it));
    res.push_back(std::move(it->second.encoded_frame));
  }

  // Erase the (now-empty) entries and recompute decodability. The moved-from
  // frames are nullptr, so they are not counted as dropped.
  DropNextDecodableTemporalUnit();
  return res;
}
|
||||
|
||||
// Erases every frame up to and including the next decodable temporal unit,
// counting as "dropped" only entries that still hold a frame (entries already
// extracted hold nullptr).
void FrameBuffer::DropNextDecodableTemporalUnit() {
  if (!next_decodable_temporal_unit_) {
    return;
  }

  auto end_it = std::next(next_decodable_temporal_unit_->last_frame);
  num_dropped_frames_ += std::count_if(
      frames_.begin(), end_it,
      [](const auto& f) { return f.second.encoded_frame != nullptr; });

  frames_.erase(frames_.begin(), end_it);
  FindNextAndLastDecodableTemporalUnit();
}
|
||||
|
||||
// Highest frame ID whose references are all (transitively) available.
absl::optional<int64_t> FrameBuffer::LastContinuousFrameId() const {
  return last_continuous_frame_id_;
}

// Highest frame ID that completes a continuous temporal unit.
absl::optional<int64_t> FrameBuffer::LastContinuousTemporalUnitFrameId() const {
  return last_continuous_temporal_unit_frame_id_;
}

// RTP timestamps of the next and last currently decodable temporal units, or
// nullopt when nothing is decodable.
absl::optional<FrameBuffer::DecodabilityInfo>
FrameBuffer::DecodableTemporalUnitsInfo() const {
  return decodable_temporal_units_info_;
}

// Lifetime counter of continuous temporal units seen.
int FrameBuffer::GetTotalNumberOfContinuousTemporalUnits() const {
  return num_continuous_temporal_units_;
}
// Lifetime counter of frames dropped without being extracted.
int FrameBuffer::GetTotalNumberOfDroppedFrames() const {
  return num_dropped_frames_;
}

// Number of frames currently buffered.
size_t FrameBuffer::CurrentSize() const {
  return frames_.size();
}
|
||||
|
||||
bool FrameBuffer::IsContinuous(const FrameIterator& it) const {
|
||||
for (int64_t reference : GetReferences(it)) {
|
||||
if (decoded_frame_history_.WasDecoded(reference)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
auto reference_frame_it = frames_.find(reference);
|
||||
if (reference_frame_it != frames_.end() &&
|
||||
reference_frame_it->second.continuous) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// Starting at the newly inserted frame, re-evaluates continuity for it and
// every later frame (a new frame can only make frames at or after it
// continuous), updating the last-continuous bookkeeping as frames flip.
void FrameBuffer::PropagateContinuity(const FrameIterator& frame_it) {
  for (auto it = frame_it; it != frames_.end(); ++it) {
    if (!it->second.continuous) {
      if (IsContinuous(it)) {
        it->second.continuous = true;
        if (last_continuous_frame_id_ < GetFrameId(it)) {
          last_continuous_frame_id_ = GetFrameId(it);
        }
        // A temporal unit becomes continuous when its closing frame does.
        if (IsLastFrameInTemporalUnit(it)) {
          num_continuous_temporal_units_++;
          if (last_continuous_temporal_unit_frame_id_ < GetFrameId(it)) {
            last_continuous_temporal_unit_frame_id_ = GetFrameId(it);
          }
        }
      }
    }
  }
}
|
||||
|
||||
void FrameBuffer::FindNextAndLastDecodableTemporalUnit() {
|
||||
next_decodable_temporal_unit_.reset();
|
||||
decodable_temporal_units_info_.reset();
|
||||
|
||||
if (!last_continuous_temporal_unit_frame_id_) {
|
||||
return;
|
||||
}
|
||||
|
||||
FrameIterator first_frame_it = frames_.begin();
|
||||
FrameIterator last_frame_it = frames_.begin();
|
||||
absl::InlinedVector<int64_t, 4> frames_in_temporal_unit;
|
||||
uint32_t last_decodable_temporal_unit_timestamp;
|
||||
for (auto frame_it = frames_.begin(); frame_it != frames_.end();) {
|
||||
if (GetFrameId(frame_it) > *last_continuous_temporal_unit_frame_id_) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (GetTimestamp(frame_it) != GetTimestamp(first_frame_it)) {
|
||||
frames_in_temporal_unit.clear();
|
||||
first_frame_it = frame_it;
|
||||
}
|
||||
|
||||
frames_in_temporal_unit.push_back(GetFrameId(frame_it));
|
||||
|
||||
last_frame_it = frame_it++;
|
||||
|
||||
if (IsLastFrameInTemporalUnit(last_frame_it)) {
|
||||
bool temporal_unit_decodable = true;
|
||||
for (auto it = first_frame_it; it != frame_it && temporal_unit_decodable;
|
||||
++it) {
|
||||
for (int64_t reference : GetReferences(it)) {
|
||||
if (!decoded_frame_history_.WasDecoded(reference) &&
|
||||
!absl::c_linear_search(frames_in_temporal_unit, reference)) {
|
||||
// A frame in the temporal unit has a non-decoded reference outside
|
||||
// the temporal unit, so it's not yet ready to be decoded.
|
||||
temporal_unit_decodable = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (temporal_unit_decodable) {
|
||||
if (!next_decodable_temporal_unit_) {
|
||||
next_decodable_temporal_unit_ = {first_frame_it, last_frame_it};
|
||||
}
|
||||
|
||||
last_decodable_temporal_unit_timestamp = GetTimestamp(first_frame_it);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (next_decodable_temporal_unit_) {
|
||||
decodable_temporal_units_info_ = {
|
||||
.next_rtp_timestamp =
|
||||
GetTimestamp(next_decodable_temporal_unit_->first_frame),
|
||||
.last_rtp_timestamp = last_decodable_temporal_unit_timestamp};
|
||||
}
|
||||
}
|
||||
|
||||
// Drops all buffered frames and resets every piece of derived state, returning
// the buffer to its freshly constructed condition (lifetime counters are
// intentionally kept).
void FrameBuffer::Clear() {
  // Decodability and continuity state is derived from `frames_`, so it must
  // be reset together with the frame map itself.
  next_decodable_temporal_unit_.reset();
  decodable_temporal_units_info_.reset();
  last_continuous_frame_id_.reset();
  last_continuous_temporal_unit_frame_id_.reset();
  frames_.clear();
  decoded_frame_history_.Clear();
}
|
||||
|
||||
} // namespace webrtc
|
||||
106
TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.h
Normal file
106
TMessagesProj/jni/voip/webrtc/api/video/frame_buffer.h
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_FRAME_BUFFER_H_
|
||||
#define API_VIDEO_FRAME_BUFFER_H_
|
||||
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/container/inlined_vector.h"
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/field_trials_view.h"
|
||||
#include "api/video/encoded_frame.h"
|
||||
#include "modules/video_coding/utility/decoded_frames_history.h"
|
||||
|
||||
namespace webrtc {
|
||||
// The high level idea of the FrameBuffer is to order frames received from the
|
||||
// network into a decodable stream. Frames are order by frame ID, and grouped
|
||||
// into temporal units by timestamp. A temporal unit is decodable after all
|
||||
// referenced frames outside the unit has been decoded, and a temporal unit is
|
||||
// continuous if all referenced frames are directly or indirectly decodable.
|
||||
// The FrameBuffer is thread-unsafe.
|
||||
class FrameBuffer {
 public:
  // RTP timestamps of the first and last temporal units that are currently
  // ready to be decoded.
  struct DecodabilityInfo {
    uint32_t next_rtp_timestamp;
    uint32_t last_rtp_timestamp;
  };

  // The `max_size` determines the maximum number of frames the buffer will
  // store, and max_decode_history determines how far back (by frame ID) the
  // buffer will store if a frame was decoded or not.
  FrameBuffer(int max_size,
              int max_decode_history,
              // TODO(hta): remove field trials!
              const FieldTrialsView& field_trials);
  FrameBuffer(const FrameBuffer&) = delete;
  FrameBuffer& operator=(const FrameBuffer&) = delete;
  ~FrameBuffer() = default;

  // Inserted frames may only reference backwards, and must have no duplicate
  // references. Frame insertion will fail if `frame` is a duplicate, has
  // already been decoded, invalid, or if the buffer is full and the frame is
  // not a keyframe. Returns true if the frame was successfully inserted.
  bool InsertFrame(std::unique_ptr<EncodedFrame> frame);

  // Mark all frames belonging to the next decodable temporal unit as decoded
  // and returns them.
  absl::InlinedVector<std::unique_ptr<EncodedFrame>, 4>
  ExtractNextDecodableTemporalUnit();

  // Drop all frames in the next decodable unit.
  void DropNextDecodableTemporalUnit();

  absl::optional<int64_t> LastContinuousFrameId() const;
  absl::optional<int64_t> LastContinuousTemporalUnitFrameId() const;
  absl::optional<DecodabilityInfo> DecodableTemporalUnitsInfo() const;

  int GetTotalNumberOfContinuousTemporalUnits() const;
  int GetTotalNumberOfDroppedFrames() const;
  size_t CurrentSize() const;

 private:
  // Per-frame bookkeeping: the frame itself plus whether its references are
  // all (transitively) available.
  struct FrameInfo {
    std::unique_ptr<EncodedFrame> encoded_frame;
    bool continuous = false;
  };

  // Keyed by frame ID, so iteration order is decode order.
  using FrameMap = std::map<int64_t, FrameInfo>;
  using FrameIterator = FrameMap::iterator;

  struct TemporalUnit {
    // Both first and last are inclusive.
    FrameIterator first_frame;
    FrameIterator last_frame;
  };

  // True iff every reference of `it` is decoded or continuous in the buffer.
  bool IsContinuous(const FrameIterator& it) const;
  // Re-evaluates continuity from `frame_it` onwards.
  void PropagateContinuity(const FrameIterator& frame_it);
  // Recomputes next_decodable_temporal_unit_/decodable_temporal_units_info_.
  void FindNextAndLastDecodableTemporalUnit();
  // Drops all frames and resets derived state (counters are kept).
  void Clear();

  const bool legacy_frame_id_jump_behavior_;
  const size_t max_size_;
  FrameMap frames_;
  absl::optional<TemporalUnit> next_decodable_temporal_unit_;
  absl::optional<DecodabilityInfo> decodable_temporal_units_info_;
  absl::optional<int64_t> last_continuous_frame_id_;
  absl::optional<int64_t> last_continuous_temporal_unit_frame_id_;
  video_coding::DecodedFramesHistory decoded_frame_history_;

  // Lifetime counters, not reset by Clear().
  int num_continuous_temporal_units_ = 0;
  int num_dropped_frames_ = 0;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_FRAME_BUFFER_H_
|
||||
393
TMessagesProj/jni/voip/webrtc/api/video/frame_buffer_unittest.cc
Normal file
393
TMessagesProj/jni/voip/webrtc/api/video/frame_buffer_unittest.cc
Normal file
|
|
@ -0,0 +1,393 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "api/video/frame_buffer.h"
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "api/video/encoded_frame.h"
|
||||
#include "test/fake_encoded_frame.h"
|
||||
#include "test/gmock.h"
|
||||
#include "test/gtest.h"
|
||||
#include "test/scoped_key_value_config.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
|
||||
using ::testing::ElementsAre;
|
||||
using ::testing::Eq;
|
||||
using ::testing::IsEmpty;
|
||||
using ::testing::Matches;
|
||||
|
||||
// gMock matcher: matches an EncodedFrame pointer whose Id() equals `id`.
MATCHER_P(FrameWithId, id, "") {
  return Matches(Eq(id))(arg->Id());
}

// Frames with self-references or duplicate references must be rejected and
// must not advance the last-continuous frame ID.
TEST(FrameBuffer3Test, RejectInvalidRefs) {
  test::ScopedKeyValueConfig field_trials;
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
                     field_trials);
  // Ref must be less than the id of this frame.
  EXPECT_FALSE(buffer.InsertFrame(
      test::FakeFrameBuilder().Time(0).Id(0).Refs({0}).AsLast().Build()));
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt));

  // Duplicate ids are also invalid.
  EXPECT_TRUE(buffer.InsertFrame(
      test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
  EXPECT_FALSE(buffer.InsertFrame(
      test::FakeFrameBuilder().Time(20).Id(2).Refs({1, 1}).AsLast().Build()));
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));
}

// Inserting frames in order advances LastContinuousFrameId per frame, while
// LastContinuousTemporalUnitFrameId only advances once the unit's closing
// (AsLast) frame arrives.
TEST(FrameBuffer3Test, LastContinuousUpdatesOnInsertedFrames) {
  test::ScopedKeyValueConfig field_trials;
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
                     field_trials);
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(absl::nullopt));
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));

  EXPECT_TRUE(
      buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));

  EXPECT_TRUE(buffer.InsertFrame(
      test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build()));
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(2));
  EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(2));
}

// A frame arriving out of order (Id 2 after Id 3) must retroactively make the
// later frame continuous.
TEST(FrameBuffer3Test, LastContinuousFrameReordering) {
  test::ScopedKeyValueConfig field_trials;
  FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
                     field_trials);

  EXPECT_TRUE(buffer.InsertFrame(
      test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
  EXPECT_TRUE(buffer.InsertFrame(
      test::FakeFrameBuilder().Time(30).Id(3).Refs({2}).AsLast().Build()));
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(1));

  EXPECT_TRUE(buffer.InsertFrame(
      test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
  EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(3));
}
|
||||
|
||||
TEST(FrameBuffer3Test, LastContinuousTemporalUnit) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_TRUE(
|
||||
buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
|
||||
EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(2));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, LastContinuousTemporalUnitReordering) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_TRUE(
|
||||
buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(3).Refs({1}).Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(4).Refs({2, 3}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(absl::nullopt));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.LastContinuousTemporalUnitFrameId(), Eq(4));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, NextDecodable) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo(), Eq(absl::nullopt));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(10U));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, AdvanceNextDecodableOnExtraction) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(3).Refs({2}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(10U));
|
||||
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(1)));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(20U));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(2)));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(30U));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(3)));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, AdvanceLastDecodableOnExtraction) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->last_rtp_timestamp, Eq(10U));
|
||||
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(1)));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->last_rtp_timestamp, Eq(30U));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, FrameUpdatesNextDecodable) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(20U));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.DecodableTemporalUnitsInfo()->next_rtp_timestamp, Eq(10U));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, KeyframeClearsFullBuffer) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/5, /*max_decode_history=*/10,
|
||||
field_trials);
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(3).Refs({2}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(40).Id(4).Refs({3}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(50).Id(5).Refs({4}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(5));
|
||||
|
||||
// Frame buffer is full
|
||||
EXPECT_FALSE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(60).Id(6).Refs({5}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(5));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(70).Id(7).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.LastContinuousFrameId(), Eq(7));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, DropNextDecodableTemporalUnit) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
|
||||
|
||||
buffer.ExtractNextDecodableTemporalUnit();
|
||||
buffer.DropNextDecodableTemporalUnit();
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(3)));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, OldFramesAreIgnored) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
|
||||
|
||||
buffer.ExtractNextDecodableTemporalUnit();
|
||||
buffer.ExtractNextDecodableTemporalUnit();
|
||||
|
||||
EXPECT_FALSE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_FALSE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(3)));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, ReturnFullTemporalUnitKSVC) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
EXPECT_TRUE(
|
||||
buffer.InsertFrame(test::FakeFrameBuilder().Time(10).Id(1).Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(2).Refs({1}).Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(3).Refs({2}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(1), FrameWithId(2), FrameWithId(3)));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(4).Refs({3}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(4)));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, InterleavedStream) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(3).Refs({1}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(40).Id(4).Refs({2}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(50).Id(5).Refs({3}).AsLast().Build()));
|
||||
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(1)));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(2)));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(3)));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(4)));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(5)));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(70).Id(7).Refs({5}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(7)));
|
||||
EXPECT_FALSE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(60).Id(6).Refs({4}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(90).Id(9).Refs({7}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(9)));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, LegacyFrameIdJumpBehavior) {
|
||||
{
|
||||
test::ScopedKeyValueConfig field_trials(
|
||||
"WebRTC-LegacyFrameIdJumpBehavior/Disabled/");
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(3).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(3)));
|
||||
EXPECT_FALSE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(2).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
|
||||
}
|
||||
|
||||
{
|
||||
// WebRTC-LegacyFrameIdJumpBehavior is disabled by default.
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(3).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(3)));
|
||||
EXPECT_FALSE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(30).Id(2).Refs({1}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(), IsEmpty());
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(40).Id(1).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.ExtractNextDecodableTemporalUnit(),
|
||||
ElementsAre(FrameWithId(1)));
|
||||
}
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, TotalNumberOfContinuousTemporalUnits) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(0));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).Build()));
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(40).Id(4).Refs({2}).Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(40).Id(5).Refs({3, 4}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(1));
|
||||
|
||||
// Reordered
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(3).Refs({2}).AsLast().Build()));
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfContinuousTemporalUnits(), Eq(3));
|
||||
}
|
||||
|
||||
TEST(FrameBuffer3Test, TotalNumberOfDroppedFrames) {
|
||||
test::ScopedKeyValueConfig field_trials;
|
||||
FrameBuffer buffer(/*max_frame_slots=*/10, /*max_decode_history=*/100,
|
||||
field_trials);
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(0));
|
||||
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(10).Id(1).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(2).Refs({1}).Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(20).Id(3).Refs({2}).AsLast().Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(40).Id(4).Refs({1}).Build()));
|
||||
EXPECT_TRUE(buffer.InsertFrame(
|
||||
test::FakeFrameBuilder().Time(40).Id(5).Refs({4}).AsLast().Build()));
|
||||
|
||||
buffer.ExtractNextDecodableTemporalUnit();
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(0));
|
||||
|
||||
buffer.DropNextDecodableTemporalUnit();
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(2));
|
||||
|
||||
buffer.ExtractNextDecodableTemporalUnit();
|
||||
EXPECT_THAT(buffer.GetTotalNumberOfDroppedFrames(), Eq(2));
|
||||
}
|
||||
|
||||
} // namespace
|
||||
} // namespace webrtc
|
||||
21
TMessagesProj/jni/voip/webrtc/api/video/hdr_metadata.cc
Normal file
21
TMessagesProj/jni/voip/webrtc/api/video/hdr_metadata.cc
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/hdr_metadata.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// The constructors are declared in hdr_metadata.h and defaulted here, out of
// line, in this translation unit.
HdrMasteringMetadata::Chromaticity::Chromaticity() = default;

HdrMasteringMetadata::HdrMasteringMetadata() = default;

HdrMetadata::HdrMetadata() = default;
|
||||
|
||||
} // namespace webrtc
|
||||
105
TMessagesProj/jni/voip/webrtc/api/video/hdr_metadata.h
Normal file
105
TMessagesProj/jni/voip/webrtc/api/video/hdr_metadata.h
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_HDR_METADATA_H_
|
||||
#define API_VIDEO_HDR_METADATA_H_
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// SMPTE ST 2086 mastering metadata,
|
||||
// see https://ieeexplore.ieee.org/document/8353899.
|
||||
struct HdrMasteringMetadata {
  // One CIE xy chromaticity coordinate pair of the mastering display.
  struct Chromaticity {
    Chromaticity();

    bool operator==(const Chromaticity& rhs) const {
      return (x == rhs.x) && (y == rhs.y);
    }

    // True when both coordinates fall inside the representable
    // [0.0, 1.0] range.
    bool Validate() const {
      if (x < 0.0 || x > 1.0)
        return false;
      return y >= 0.0 && y <= 1.0;
    }

    // xy chromaticity coordinates must be calculated as specified in ISO
    // 11664-3:2012 Section 7, and must be specified with four decimal places.
    // The x coordinate should be in the range [0.0001, 0.7400] and the y
    // coordinate should be in the range [0.0001, 0.8400]. Valid range [0.0000,
    // 1.0000].
    float x = 0.0f;
    float y = 0.0f;
  };

  HdrMasteringMetadata();

  bool operator==(const HdrMasteringMetadata& rhs) const {
    return primary_r == rhs.primary_r && primary_g == rhs.primary_g &&
           primary_b == rhs.primary_b && white_point == rhs.white_point &&
           luminance_max == rhs.luminance_max &&
           luminance_min == rhs.luminance_min;
  }

  // True when the luminance bounds and all four chromaticities are within
  // their representable ranges.
  bool Validate() const {
    if (luminance_max < 0.0 || luminance_max > 20000.0)
      return false;
    if (luminance_min < 0.0 || luminance_min > 5.0)
      return false;
    return primary_r.Validate() && primary_g.Validate() &&
           primary_b.Validate() && white_point.Validate();
  }

  // The nominal primaries of the mastering display.
  Chromaticity primary_r;
  Chromaticity primary_g;
  Chromaticity primary_b;

  // The nominal chromaticity of the white point of the mastering display.
  Chromaticity white_point;

  // The nominal maximum display luminance of the mastering display. Specified
  // in the unit candela/m2. The value should be in the range [5, 10000] with
  // zero decimal places. Valid range [0, 20000].
  float luminance_max = 0.0f;

  // The nominal minimum display luminance of the mastering display. Specified
  // in the unit candela/m2. The value should be in the range [0.0001, 5.0000]
  // with four decimal places. Valid range [0.0000, 5.0000].
  float luminance_min = 0.0f;
};
|
||||
|
||||
// High dynamic range (HDR) metadata common for HDR10 and WebM/VP9-based HDR
|
||||
// formats. This struct replicates the HDRMetadata struct defined in
|
||||
// https://cs.chromium.org/chromium/src/media/base/hdr_metadata.h
|
||||
struct HdrMetadata {
|
||||
HdrMetadata();
|
||||
|
||||
bool operator==(const HdrMetadata& rhs) const {
|
||||
return (
|
||||
(max_content_light_level == rhs.max_content_light_level) &&
|
||||
(max_frame_average_light_level == rhs.max_frame_average_light_level) &&
|
||||
(mastering_metadata == rhs.mastering_metadata));
|
||||
}
|
||||
|
||||
bool Validate() const {
|
||||
return max_content_light_level >= 0 && max_content_light_level <= 20000 &&
|
||||
max_frame_average_light_level >= 0 &&
|
||||
max_frame_average_light_level <= 20000 &&
|
||||
mastering_metadata.Validate();
|
||||
}
|
||||
|
||||
HdrMasteringMetadata mastering_metadata;
|
||||
// Max content light level (CLL), i.e. maximum brightness level present in the
|
||||
// stream, in nits. 1 nit = 1 candela/m2. Valid range [0, 20000].
|
||||
int max_content_light_level = 0;
|
||||
// Max frame-average light level (FALL), i.e. maximum average brightness of
|
||||
// the brightest frame in the stream, in nits. Valid range [0, 20000].
|
||||
int max_frame_average_light_level = 0;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_HDR_METADATA_H_
|
||||
213
TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc
Normal file
213
TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.cc
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "api/video/i010_buffer.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "api/make_ref_counted.h"
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/scale.h"
|
||||
|
||||
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
static const int kBufferAlignment = 64;
// Two bytes per stored sample (samples are kept in 16-bit words).
static const int kBytesPerPixel = 2;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
|
||||
int I010DataSize(int height, int stride_y, int stride_u, int stride_v) {
|
||||
return kBytesPerPixel *
|
||||
(stride_y * height + (stride_u + stride_v) * ((height + 1) / 2));
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// Allocates one contiguous, kBufferAlignment-aligned region that holds the
// Y, U and V planes back to back (see DataY/DataU/DataV for the offsets).
I010Buffer::I010Buffer(int width,
                       int height,
                       int stride_y,
                       int stride_u,
                       int stride_v)
    : width_(width),
      height_(height),
      stride_y_(stride_y),
      stride_u_(stride_u),
      stride_v_(stride_v),
      data_(static_cast<uint16_t*>(
          AlignedMalloc(I010DataSize(height, stride_y, stride_u, stride_v),
                        kBufferAlignment))) {
  RTC_DCHECK_GT(width, 0);
  RTC_DCHECK_GT(height, 0);
  // Strides must cover the plane widths; the chroma planes are half-width,
  // rounded up.
  RTC_DCHECK_GE(stride_y, width);
  RTC_DCHECK_GE(stride_u, (width + 1) / 2);
  RTC_DCHECK_GE(stride_v, (width + 1) / 2);
}

I010Buffer::~I010Buffer() {}
|
||||
|
||||
// static
|
||||
rtc::scoped_refptr<I010Buffer> I010Buffer::Create(int width, int height) {
|
||||
return rtc::make_ref_counted<I010Buffer>(width, height, width,
|
||||
(width + 1) / 2, (width + 1) / 2);
|
||||
}
|
||||
|
||||
// static
// Deep-copies `source` into a freshly allocated I010Buffer of the same
// dimensions.
rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
    const I010BufferInterface& source) {
  const int width = source.width();
  const int height = source.height();
  rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
  int res = libyuv::I010Copy(
      source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
      source.DataV(), source.StrideV(), buffer->MutableDataY(),
      buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
      buffer->MutableDataV(), buffer->StrideV(), width, height);
  // libyuv reports success with 0.
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}
|
||||
|
||||
// static
// Converts 8-bit I420 input into a newly allocated I010 buffer of the same
// dimensions via libyuv::I420ToI010.
rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
    const I420BufferInterface& source) {
  const int width = source.width();
  const int height = source.height();
  rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
  int res = libyuv::I420ToI010(
      source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
      source.DataV(), source.StrideV(), buffer->MutableDataY(),
      buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
      buffer->MutableDataV(), buffer->StrideV(), width, height);
  // libyuv reports success with 0.
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}
|
||||
|
||||
// static
// Returns a rotated copy of `src`. Output dimensions are swapped for 90/270
// degree rotations.
rtc::scoped_refptr<I010Buffer> I010Buffer::Rotate(
    const I010BufferInterface& src,
    VideoRotation rotation) {
  // Fast path: no rotation requested, just deep-copy.
  if (rotation == webrtc::kVideoRotation_0)
    return Copy(src);

  RTC_CHECK(src.DataY());
  RTC_CHECK(src.DataU());
  RTC_CHECK(src.DataV());
  int rotated_width = src.width();
  int rotated_height = src.height();
  if (rotation == webrtc::kVideoRotation_90 ||
      rotation == webrtc::kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::I010Buffer> buffer =
      Create(rotated_width, rotated_height);

  // NOTE: the static_cast assumes webrtc::VideoRotation and
  // libyuv::RotationMode use matching numeric values.
  int res = libyuv::I010Rotate(
      src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), src.DataV(),
      src.StrideV(), buffer->MutableDataY(), buffer->StrideY(),
      buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataV(),
      buffer->StrideV(), src.width(), src.height(),
      static_cast<libyuv::RotationMode>(rotation));
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}
|
||||
|
||||
// Converts this buffer's 10-bit content into a newly allocated 8-bit I420
// buffer of the same dimensions.
rtc::scoped_refptr<I420BufferInterface> I010Buffer::ToI420() {
  rtc::scoped_refptr<I420Buffer> i420_buffer =
      I420Buffer::Create(width(), height());
  int res = libyuv::I010ToI420(
      DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
      i420_buffer->MutableDataY(), i420_buffer->StrideY(),
      i420_buffer->MutableDataU(), i420_buffer->StrideU(),
      i420_buffer->MutableDataV(), i420_buffer->StrideV(), width(), height());
  // libyuv reports success with 0.
  RTC_DCHECK_EQ(res, 0);

  return i420_buffer;
}
|
||||
|
||||
int I010Buffer::width() const {
  return width_;
}

int I010Buffer::height() const {
  return height_;
}

const uint16_t* I010Buffer::DataY() const {
  // The Y plane sits at the start of the single allocation.
  return data_.get();
}
const uint16_t* I010Buffer::DataU() const {
  // The U plane directly follows the Y plane.
  return data_.get() + stride_y_ * height_;
}
const uint16_t* I010Buffer::DataV() const {
  // The V plane follows the U plane, which is (height_ + 1) / 2 rows tall.
  return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2);
}

int I010Buffer::StrideY() const {
  return stride_y_;
}
int I010Buffer::StrideU() const {
  return stride_u_;
}
int I010Buffer::StrideV() const {
  return stride_v_;
}

// The buffer owns non-const storage, so dropping the const added by the
// read-only accessors is safe here.
uint16_t* I010Buffer::MutableDataY() {
  return const_cast<uint16_t*>(DataY());
}
uint16_t* I010Buffer::MutableDataU() {
  return const_cast<uint16_t*>(DataU());
}
uint16_t* I010Buffer::MutableDataV() {
  return const_cast<uint16_t*>(DataV());
}
|
||||
|
||||
// Crops `src` to the given rectangle and scales the result into this entire
// buffer, using libyuv's 16-bit box-filter scaler.
void I010Buffer::CropAndScaleFrom(const I010BufferInterface& src,
                                  int offset_x,
                                  int offset_y,
                                  int crop_width,
                                  int crop_height) {
  // The crop rectangle must lie entirely inside `src`.
  RTC_CHECK_LE(crop_width, src.width());
  RTC_CHECK_LE(crop_height, src.height());
  RTC_CHECK_LE(crop_width + offset_x, src.width());
  RTC_CHECK_LE(crop_height + offset_y, src.height());
  RTC_CHECK_GE(offset_x, 0);
  RTC_CHECK_GE(offset_y, 0);

  // Make sure offset is even so that u/v plane becomes aligned.
  const int uv_offset_x = offset_x / 2;
  const int uv_offset_y = offset_y / 2;
  offset_x = uv_offset_x * 2;
  offset_y = uv_offset_y * 2;

  // Starting sample of each source plane after applying the (rounded) offset.
  const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
  const uint16_t* u_plane =
      src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
  const uint16_t* v_plane =
      src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
  int res = libyuv::I420Scale_16(
      y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
      crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
      StrideU(), MutableDataV(), StrideV(), width(), height(),
      libyuv::kFilterBox);

  // libyuv reports success with 0.
  RTC_DCHECK_EQ(res, 0);
}

// Scales all of `src` into this buffer: a crop-and-scale with no cropping.
void I010Buffer::ScaleFrom(const I010BufferInterface& src) {
  CropAndScaleFrom(src, 0, 0, src.width(), src.height());
}
|
||||
|
||||
} // namespace webrtc
|
||||
84
TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.h
Normal file
84
TMessagesProj/jni/voip/webrtc/api/video/i010_buffer.h
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_I010_BUFFER_H_
|
||||
#define API_VIDEO_I010_BUFFER_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "rtc_base/memory/aligned_malloc.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Plain I010 buffer in standard memory.
|
||||
class I010Buffer : public I010BufferInterface {
 public:
  // Create a new buffer.
  static rtc::scoped_refptr<I010Buffer> Create(int width, int height);

  // Create a new buffer and copy the pixel data.
  static rtc::scoped_refptr<I010Buffer> Copy(const I010BufferInterface& buffer);

  // Convert and put I420 buffer into a new buffer.
  static rtc::scoped_refptr<I010Buffer> Copy(const I420BufferInterface& buffer);

  // Return a rotated copy of `src`.
  static rtc::scoped_refptr<I010Buffer> Rotate(const I010BufferInterface& src,
                                               VideoRotation rotation);

  // VideoFrameBuffer implementation.
  rtc::scoped_refptr<I420BufferInterface> ToI420() override;

  // PlanarYuv16BBuffer implementation.
  int width() const override;
  int height() const override;
  const uint16_t* DataY() const override;
  const uint16_t* DataU() const override;
  const uint16_t* DataV() const override;
  int StrideY() const override;
  int StrideU() const override;
  int StrideV() const override;

  // Writable access to the same planes returned by DataY/DataU/DataV.
  uint16_t* MutableDataY();
  uint16_t* MutableDataU();
  uint16_t* MutableDataV();

  // Scale the cropped area of `src` to the size of `this` buffer, and
  // write the result into `this`.
  void CropAndScaleFrom(const I010BufferInterface& src,
                        int offset_x,
                        int offset_y,
                        int crop_width,
                        int crop_height);

  // Scale all of `src` to the size of `this` buffer, with no cropping.
  void ScaleFrom(const I010BufferInterface& src);

 protected:
  I010Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
  ~I010Buffer() override;

 private:
  const int width_;
  const int height_;
  const int stride_y_;
  const int stride_u_;
  const int stride_v_;
  // Backing storage for all planes; released through the aligned-free
  // deleter that matches the aligned allocation.
  const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_I010_BUFFER_H_
|
||||
211
TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.cc
Normal file
211
TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.cc
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
/*
|
||||
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "api/video/i210_buffer.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "api/make_ref_counted.h"
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "api/video/i422_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/scale.h"
|
||||
|
||||
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
|
||||
static const int kBufferAlignment = 64;
|
||||
static const int kBytesPerPixel = 2;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {

// Total allocation size in bytes for an I210 buffer: each sample occupies
// kBytesPerPixel (2) bytes, and all three planes have `height` rows
// (4:2:2 chroma is subsampled horizontally only, so the chroma planes have
// the same number of rows as the luma plane).
int I210DataSize(int height, int stride_y, int stride_u, int stride_v) {
  return kBytesPerPixel *
         (stride_y * height + stride_u * height + stride_v * height);
}

}  // namespace
|
||||
|
||||
// Allocates one contiguous, 64-byte-aligned region holding the Y, U and V
// planes back to back. Strides are in samples (uint16_t), not bytes.
I210Buffer::I210Buffer(int width,
                       int height,
                       int stride_y,
                       int stride_u,
                       int stride_v)
    : width_(width),
      height_(height),
      stride_y_(stride_y),
      stride_u_(stride_u),
      stride_v_(stride_v),
      data_(static_cast<uint16_t*>(
          AlignedMalloc(I210DataSize(height, stride_y, stride_u, stride_v),
                        kBufferAlignment))) {
  RTC_DCHECK_GT(width, 0);
  RTC_DCHECK_GT(height, 0);
  RTC_DCHECK_GE(stride_y, width);
  // 4:2:2: chroma planes are half width (rounded up), full height.
  RTC_DCHECK_GE(stride_u, (width + 1) / 2);
  RTC_DCHECK_GE(stride_v, (width + 1) / 2);
}

I210Buffer::~I210Buffer() {}
|
||||
|
||||
// static
// Creates an uninitialized buffer with tightly packed strides
// (stride_y == width, chroma strides == (width + 1) / 2).
rtc::scoped_refptr<I210Buffer> I210Buffer::Create(int width, int height) {
  return rtc::make_ref_counted<I210Buffer>(width, height, width,
                                           (width + 1) / 2, (width + 1) / 2);
}

// static
// Deep-copies the pixel data of `source` into a newly created buffer
// (which may use different strides than `source`).
rtc::scoped_refptr<I210Buffer> I210Buffer::Copy(
    const I210BufferInterface& source) {
  const int width = source.width();
  const int height = source.height();
  rtc::scoped_refptr<I210Buffer> buffer = Create(width, height);
  RTC_CHECK_EQ(
      0, libyuv::I210Copy(
             source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
             source.DataV(), source.StrideV(), buffer->MutableDataY(),
             buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
             buffer->MutableDataV(), buffer->StrideV(), width, height));
  return buffer;
}

// static
// Converts an 8-bit 4:2:0 `source` into a new 10-bit 4:2:2 buffer. Goes via
// an intermediate I422 copy so that the final step is the 8-to-10-bit
// widening done by libyuv::I422ToI210.
rtc::scoped_refptr<I210Buffer> I210Buffer::Copy(
    const I420BufferInterface& source) {
  const int width = source.width();
  const int height = source.height();
  auto i422buffer = I422Buffer::Copy(source);
  rtc::scoped_refptr<I210Buffer> buffer = Create(width, height);
  RTC_CHECK_EQ(0, libyuv::I422ToI210(i422buffer->DataY(), i422buffer->StrideY(),
                                     i422buffer->DataU(), i422buffer->StrideU(),
                                     i422buffer->DataV(), i422buffer->StrideV(),
                                     buffer->MutableDataY(), buffer->StrideY(),
                                     buffer->MutableDataU(), buffer->StrideU(),
                                     buffer->MutableDataV(), buffer->StrideV(),
                                     width, height));
  return buffer;
}
|
||||
|
||||
// static
// Returns a rotated deep copy of `src`. The source planes must be non-null.
rtc::scoped_refptr<I210Buffer> I210Buffer::Rotate(
    const I210BufferInterface& src,
    VideoRotation rotation) {
  RTC_CHECK(src.DataY());
  RTC_CHECK(src.DataU());
  RTC_CHECK(src.DataV());

  // A 90 or 270 degree rotation swaps the output dimensions.
  int rotated_width = src.width();
  int rotated_height = src.height();
  if (rotation == webrtc::kVideoRotation_90 ||
      rotation == webrtc::kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::I210Buffer> buffer =
      I210Buffer::Create(rotated_width, rotated_height);

  // VideoRotation values are degrees, matching libyuv::RotationMode.
  RTC_CHECK_EQ(0,
               libyuv::I210Rotate(
                   src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
                   src.DataV(), src.StrideV(), buffer->MutableDataY(),
                   buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
                   buffer->MutableDataV(), buffer->StrideV(), src.width(),
                   src.height(), static_cast<libyuv::RotationMode>(rotation)));

  return buffer;
}
|
||||
|
||||
// Downconverts this 10-bit 4:2:2 buffer into a newly allocated 8-bit
// 4:2:0 buffer.
rtc::scoped_refptr<I420BufferInterface> I210Buffer::ToI420() {
  rtc::scoped_refptr<I420Buffer> i420_buffer =
      I420Buffer::Create(width(), height());
  // Check the libyuv return value like I410Buffer::ToI420 does; previously
  // a conversion failure here would go unnoticed.
  int res = libyuv::I210ToI420(
      DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
      i420_buffer->MutableDataY(), i420_buffer->StrideY(),
      i420_buffer->MutableDataU(), i420_buffer->StrideU(),
      i420_buffer->MutableDataV(), i420_buffer->StrideV(), width(), height());
  RTC_DCHECK_EQ(res, 0);

  return i420_buffer;
}
|
||||
|
||||
int I210Buffer::width() const {
  return width_;
}

int I210Buffer::height() const {
  return height_;
}

// All three planes live in the single `data_` allocation, laid out back to
// back: Y, then U, then V. The chroma planes have `height_` rows each
// (4:2:2 - no vertical subsampling).
const uint16_t* I210Buffer::DataY() const {
  return data_.get();
}
const uint16_t* I210Buffer::DataU() const {
  return data_.get() + stride_y_ * height_;
}
const uint16_t* I210Buffer::DataV() const {
  return data_.get() + stride_y_ * height_ + stride_u_ * height_;
}

int I210Buffer::StrideY() const {
  return stride_y_;
}
int I210Buffer::StrideU() const {
  return stride_u_;
}
int I210Buffer::StrideV() const {
  return stride_v_;
}

// The const_casts are safe: the allocation itself is writable; only the
// unique_ptr member is declared const.
uint16_t* I210Buffer::MutableDataY() {
  return const_cast<uint16_t*>(DataY());
}
uint16_t* I210Buffer::MutableDataU() {
  return const_cast<uint16_t*>(DataU());
}
uint16_t* I210Buffer::MutableDataV() {
  return const_cast<uint16_t*>(DataV());
}
|
||||
|
||||
// Scales the given crop rectangle of `src` to fill this buffer.
void I210Buffer::CropAndScaleFrom(const I210BufferInterface& src,
                                  int offset_x,
                                  int offset_y,
                                  int crop_width,
                                  int crop_height) {
  // The crop rectangle must be non-negative and lie fully inside `src`.
  RTC_CHECK_LE(crop_width, src.width());
  RTC_CHECK_LE(crop_height, src.height());
  RTC_CHECK_LE(crop_width + offset_x, src.width());
  RTC_CHECK_LE(crop_height + offset_y, src.height());
  RTC_CHECK_GE(offset_x, 0);
  RTC_CHECK_GE(offset_y, 0);
  RTC_CHECK_GE(crop_width, 0);
  RTC_CHECK_GE(crop_height, 0);

  // Make sure offset is even so that u/v plane becomes aligned.
  // 4:2:2 is subsampled horizontally only, so just offset_x is rounded down
  // to an even value; offset_y maps 1:1 onto the chroma rows.
  const int uv_offset_x = offset_x / 2;
  const int uv_offset_y = offset_y;
  offset_x = uv_offset_x * 2;

  const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
  const uint16_t* u_plane =
      src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
  const uint16_t* v_plane =
      src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
  // I422Scale_16 handles the 4:2:2 plane layout with 16-bit samples.
  int res = libyuv::I422Scale_16(
      y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
      crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
      StrideU(), MutableDataV(), StrideV(), width(), height(),
      libyuv::kFilterBox);

  RTC_DCHECK_EQ(res, 0);
}

// Scales all of `src` into this buffer, with no cropping.
void I210Buffer::ScaleFrom(const I210BufferInterface& src) {
  CropAndScaleFrom(src, 0, 0, src.width(), src.height());
}
|
||||
|
||||
} // namespace webrtc
|
||||
84
TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.h
Normal file
84
TMessagesProj/jni/voip/webrtc/api/video/i210_buffer.h
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
/*
|
||||
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_I210_BUFFER_H_
|
||||
#define API_VIDEO_I210_BUFFER_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "rtc_base/memory/aligned_malloc.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Plain I210 (yuv 422 planar 10 bits) buffer in standard memory. Samples are
// stored one per uint16_t; strides are in samples, not bytes.
class I210Buffer : public I210BufferInterface {
 public:
  // Create a new buffer.
  static rtc::scoped_refptr<I210Buffer> Create(int width, int height);

  // Create a new buffer and copy the pixel data.
  static rtc::scoped_refptr<I210Buffer> Copy(const I210BufferInterface& buffer);

  // Convert and put I420 buffer into a new buffer.
  static rtc::scoped_refptr<I210Buffer> Copy(const I420BufferInterface& buffer);

  // Return a rotated copy of `src`.
  static rtc::scoped_refptr<I210Buffer> Rotate(const I210BufferInterface& src,
                                               VideoRotation rotation);

  // VideoFrameBuffer implementation.
  rtc::scoped_refptr<I420BufferInterface> ToI420() override;

  // PlanarYuv16BBuffer implementation.
  int width() const override;
  int height() const override;
  const uint16_t* DataY() const override;
  const uint16_t* DataU() const override;
  const uint16_t* DataV() const override;
  int StrideY() const override;
  int StrideU() const override;
  int StrideV() const override;

  // Writable plane accessors; the underlying allocation is always writable.
  uint16_t* MutableDataY();
  uint16_t* MutableDataU();
  uint16_t* MutableDataV();

  // Scale the cropped area of `src` to the size of `this` buffer, and
  // write the result into `this`.
  void CropAndScaleFrom(const I210BufferInterface& src,
                        int offset_x,
                        int offset_y,
                        int crop_width,
                        int crop_height);

  // Scale all of `src` to the size of `this` buffer, with no cropping.
  void ScaleFrom(const I210BufferInterface& src);

 protected:
  I210Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
  ~I210Buffer() override;

 private:
  const int width_;
  const int height_;
  const int stride_y_;
  const int stride_u_;
  const int stride_v_;
  // Single aligned allocation holding the Y, U and V planes back to back.
  const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_I210_BUFFER_H_
|
||||
221
TMessagesProj/jni/voip/webrtc/api/video/i410_buffer.cc
Normal file
221
TMessagesProj/jni/voip/webrtc/api/video/i410_buffer.cc
Normal file
|
|
@ -0,0 +1,221 @@
|
|||
/*
|
||||
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "api/video/i410_buffer.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <utility>
|
||||
|
||||
#include "api/make_ref_counted.h"
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/planar_functions.h"
|
||||
#include "third_party/libyuv/include/libyuv/scale.h"
|
||||
|
||||
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
|
||||
static const int kBufferAlignment = 64;
|
||||
static const int kBytesPerPixel = 2;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {

// Total allocation size in bytes for an I410 buffer: each sample occupies
// kBytesPerPixel (2) bytes, and all three planes are full resolution
// (4:4:4 - no chroma subsampling), hence `height` rows each.
int I410DataSize(int height, int stride_y, int stride_u, int stride_v) {
  return kBytesPerPixel *
         (stride_y * height + stride_u * height + stride_v * height);
}

}  // namespace
|
||||
|
||||
// Delegates to the strided constructor with tightly packed strides; in
// 4:4:4 every plane is `width` samples wide.
I410Buffer::I410Buffer(int width, int height)
    : I410Buffer(width, height, width, width, width) {}

// Allocates one contiguous, 64-byte-aligned region holding the Y, U and V
// planes back to back. Strides are in samples (uint16_t), not bytes.
I410Buffer::I410Buffer(int width,
                       int height,
                       int stride_y,
                       int stride_u,
                       int stride_v)
    : width_(width),
      height_(height),
      stride_y_(stride_y),
      stride_u_(stride_u),
      stride_v_(stride_v),
      data_(static_cast<uint16_t*>(
          AlignedMalloc(I410DataSize(height, stride_y, stride_u, stride_v),
                        kBufferAlignment))) {
  RTC_DCHECK_GT(width, 0);
  RTC_DCHECK_GT(height, 0);
  RTC_DCHECK_GE(stride_y, width);
  // 4:4:4: chroma planes are full width.
  RTC_DCHECK_GE(stride_u, width);
  RTC_DCHECK_GE(stride_v, width);
}

I410Buffer::~I410Buffer() {}
|
||||
|
||||
// static
// Creates an uninitialized buffer with tightly packed strides.
rtc::scoped_refptr<I410Buffer> I410Buffer::Create(int width, int height) {
  return rtc::make_ref_counted<I410Buffer>(width, height);
}

// static
// Creates an uninitialized buffer with caller-specified strides.
rtc::scoped_refptr<I410Buffer> I410Buffer::Create(int width,
                                                  int height,
                                                  int stride_y,
                                                  int stride_u,
                                                  int stride_v) {
  return rtc::make_ref_counted<I410Buffer>(width, height, stride_y, stride_u,
                                           stride_v);
}

// static
// Deep-copies `source` by delegating to the raw-pointer overload below.
rtc::scoped_refptr<I410Buffer> I410Buffer::Copy(
    const I410BufferInterface& source) {
  return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
              source.DataU(), source.StrideU(), source.DataV(),
              source.StrideV());
}

// static
rtc::scoped_refptr<I410Buffer> I410Buffer::Copy(int width,
                                                int height,
                                                const uint16_t* data_y,
                                                int stride_y,
                                                const uint16_t* data_u,
                                                int stride_u,
                                                const uint16_t* data_v,
                                                int stride_v) {
  // Note: May use different strides than the input data.
  rtc::scoped_refptr<I410Buffer> buffer = Create(width, height);
  int res = libyuv::I410Copy(data_y, stride_y, data_u, stride_u, data_v,
                             stride_v, buffer->MutableDataY(),
                             buffer->StrideY(), buffer->MutableDataU(),
                             buffer->StrideU(), buffer->MutableDataV(),
                             buffer->StrideV(), width, height);
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}
|
||||
|
||||
// static
// Returns a rotated deep copy of `src`. The source planes must be non-null.
rtc::scoped_refptr<I410Buffer> I410Buffer::Rotate(
    const I410BufferInterface& src,
    VideoRotation rotation) {
  RTC_CHECK(src.DataY());
  RTC_CHECK(src.DataU());
  RTC_CHECK(src.DataV());

  // A 90 or 270 degree rotation swaps the output dimensions.
  int rotated_width = src.width();
  int rotated_height = src.height();
  if (rotation == webrtc::kVideoRotation_90 ||
      rotation == webrtc::kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::I410Buffer> buffer =
      I410Buffer::Create(rotated_width, rotated_height);

  // VideoRotation values are degrees, matching libyuv::RotationMode.
  int res = libyuv::I410Rotate(
      src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), src.DataV(),
      src.StrideV(), buffer->MutableDataY(), buffer->StrideY(),
      buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataV(),
      buffer->StrideV(), src.width(), src.height(),
      static_cast<libyuv::RotationMode>(rotation));
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}
|
||||
|
||||
// Downconverts this 10-bit 4:4:4 buffer into a newly allocated 8-bit
// 4:2:0 buffer.
rtc::scoped_refptr<I420BufferInterface> I410Buffer::ToI420() {
  rtc::scoped_refptr<I420Buffer> i420_buffer =
      I420Buffer::Create(width(), height());
  int res = libyuv::I410ToI420(
      DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
      i420_buffer->MutableDataY(), i420_buffer->StrideY(),
      i420_buffer->MutableDataU(), i420_buffer->StrideU(),
      i420_buffer->MutableDataV(), i420_buffer->StrideV(), width(), height());
  RTC_DCHECK_EQ(res, 0);

  return i420_buffer;
}
|
||||
|
||||
// Zero-fills the entire allocation (all three planes including stride
// padding). See the header for why this exists (memory-checker/ffmpeg
// workarounds).
void I410Buffer::InitializeData() {
  memset(data_.get(), 0,
         I410DataSize(height_, stride_y_, stride_u_, stride_v_));
}
|
||||
|
||||
int I410Buffer::width() const {
  return width_;
}

int I410Buffer::height() const {
  return height_;
}

// All three planes live in the single `data_` allocation, laid out back to
// back: Y, then U, then V. Each plane has `height_` rows (4:4:4).
const uint16_t* I410Buffer::DataY() const {
  return data_.get();
}
const uint16_t* I410Buffer::DataU() const {
  return data_.get() + stride_y_ * height_;
}
const uint16_t* I410Buffer::DataV() const {
  return data_.get() + stride_y_ * height_ + stride_u_ * height_;
}

int I410Buffer::StrideY() const {
  return stride_y_;
}
int I410Buffer::StrideU() const {
  return stride_u_;
}
int I410Buffer::StrideV() const {
  return stride_v_;
}

// The const_casts are safe: the allocation itself is writable; only the
// unique_ptr member is declared const.
uint16_t* I410Buffer::MutableDataY() {
  return const_cast<uint16_t*>(DataY());
}
uint16_t* I410Buffer::MutableDataU() {
  return const_cast<uint16_t*>(DataU());
}
uint16_t* I410Buffer::MutableDataV() {
  return const_cast<uint16_t*>(DataV());
}
|
||||
|
||||
void I410Buffer::CropAndScaleFrom(const I410BufferInterface& src,
|
||||
int offset_x,
|
||||
int offset_y,
|
||||
int crop_width,
|
||||
int crop_height) {
|
||||
RTC_CHECK_LE(crop_width, src.width());
|
||||
RTC_CHECK_LE(crop_height, src.height());
|
||||
RTC_CHECK_LE(crop_width + offset_x, src.width());
|
||||
RTC_CHECK_LE(crop_height + offset_y, src.height());
|
||||
RTC_CHECK_GE(offset_x, 0);
|
||||
RTC_CHECK_GE(offset_y, 0);
|
||||
|
||||
const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
|
||||
const uint16_t* u_plane = src.DataU() + src.StrideU() * offset_y + offset_x;
|
||||
const uint16_t* v_plane = src.DataV() + src.StrideV() * offset_y + offset_x;
|
||||
int res = libyuv::I444Scale_16(
|
||||
y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
|
||||
crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
|
||||
StrideU(), MutableDataV(), StrideV(), width(), height(),
|
||||
libyuv::kFilterBox);
|
||||
|
||||
RTC_DCHECK_EQ(res, 0);
|
||||
}
|
||||
|
||||
void I410Buffer::ScaleFrom(const I410BufferInterface& src) {
|
||||
CropAndScaleFrom(src, 0, 0, src.width(), src.height());
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
104
TMessagesProj/jni/voip/webrtc/api/video/i410_buffer.h
Normal file
104
TMessagesProj/jni/voip/webrtc/api/video/i410_buffer.h
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
/*
|
||||
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_I410_BUFFER_H_
|
||||
#define API_VIDEO_I410_BUFFER_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "rtc_base/memory/aligned_malloc.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Plain I410 (yuv 444 planar 10 bits) buffer in standard memory. Samples are
// stored one per uint16_t; strides are in samples, not bytes.
// NOTE(review): uses RTC_EXPORT but this header does not directly include
// "rtc_base/system/rtc_export.h" (i420_buffer.h does) - presumably pulled in
// transitively; verify.
class RTC_EXPORT I410Buffer : public I410BufferInterface {
 public:
  // Create a new buffer, optionally with caller-specified strides.
  static rtc::scoped_refptr<I410Buffer> Create(int width, int height);
  static rtc::scoped_refptr<I410Buffer> Create(int width,
                                               int height,
                                               int stride_y,
                                               int stride_u,
                                               int stride_v);

  // Create a new buffer and copy the pixel data.
  static rtc::scoped_refptr<I410Buffer> Copy(const I410BufferInterface& buffer);

  static rtc::scoped_refptr<I410Buffer> Copy(int width,
                                             int height,
                                             const uint16_t* data_y,
                                             int stride_y,
                                             const uint16_t* data_u,
                                             int stride_u,
                                             const uint16_t* data_v,
                                             int stride_v);

  // Returns a rotated copy of `src`.
  static rtc::scoped_refptr<I410Buffer> Rotate(const I410BufferInterface& src,
                                               VideoRotation rotation);

  rtc::scoped_refptr<I420BufferInterface> ToI420() final;
  // Not an I420 buffer, so there is no zero-conversion I420 view.
  const I420BufferInterface* GetI420() const final { return nullptr; }

  // Sets all three planes to all zeros. Used to work around for
  // quirks in memory checkers
  // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
  // ffmpeg (http://crbug.com/390941).
  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
  // issues are resolved in a better way. Or in the mean time, use SetBlack.
  void InitializeData();

  int width() const override;
  int height() const override;
  const uint16_t* DataY() const override;
  const uint16_t* DataU() const override;
  const uint16_t* DataV() const override;

  int StrideY() const override;
  int StrideU() const override;
  int StrideV() const override;

  // Writable plane accessors; the underlying allocation is always writable.
  uint16_t* MutableDataY();
  uint16_t* MutableDataU();
  uint16_t* MutableDataV();

  // Scale the cropped area of `src` to the size of `this` buffer, and
  // write the result into `this`.
  void CropAndScaleFrom(const I410BufferInterface& src,
                        int offset_x,
                        int offset_y,
                        int crop_width,
                        int crop_height);

  // Scale all of `src` to the size of `this` buffer, with no cropping.
  void ScaleFrom(const I410BufferInterface& src);

 protected:
  I410Buffer(int width, int height);
  I410Buffer(int width, int height, int stride_y, int stride_u, int stride_v);

  ~I410Buffer() override;

 private:
  const int width_;
  const int height_;
  const int stride_y_;
  const int stride_u_;
  const int stride_v_;
  // Single aligned allocation holding the Y, U and V planes back to back.
  const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_I410_BUFFER_H_
|
||||
232
TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc
Normal file
232
TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.cc
Normal file
|
|
@ -0,0 +1,232 @@
|
|||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "api/video/i420_buffer.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <utility>
|
||||
|
||||
#include "api/make_ref_counted.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/planar_functions.h"
|
||||
#include "third_party/libyuv/include/libyuv/scale.h"
|
||||
|
||||
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
|
||||
static const int kBufferAlignment = 64;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {

// Total allocation size in bytes for an I420 buffer (8-bit samples): the
// Y plane has `height` rows and the chroma planes (height + 1) / 2 rows
// each (4:2:0 - subsampled in both dimensions, rounded up for odd sizes).
int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
  return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2);
}

}  // namespace
|
||||
|
||||
// Delegates to the strided constructor with tightly packed strides
// (stride_y == width, chroma strides == (width + 1) / 2).
I420Buffer::I420Buffer(int width, int height)
    : I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {}

// Allocates one contiguous, 64-byte-aligned region holding the Y, U and V
// planes back to back. Strides are in bytes (one byte per sample).
I420Buffer::I420Buffer(int width,
                       int height,
                       int stride_y,
                       int stride_u,
                       int stride_v)
    : width_(width),
      height_(height),
      stride_y_(stride_y),
      stride_u_(stride_u),
      stride_v_(stride_v),
      data_(static_cast<uint8_t*>(
          AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v),
                        kBufferAlignment))) {
  RTC_DCHECK_GT(width, 0);
  RTC_DCHECK_GT(height, 0);
  RTC_DCHECK_GE(stride_y, width);
  // 4:2:0: chroma planes are half width, rounded up.
  RTC_DCHECK_GE(stride_u, (width + 1) / 2);
  RTC_DCHECK_GE(stride_v, (width + 1) / 2);
}

I420Buffer::~I420Buffer() {}
|
||||
|
||||
// static
// Creates an uninitialized buffer with tightly packed strides.
rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) {
  return rtc::make_ref_counted<I420Buffer>(width, height);
}

// static
// Creates an uninitialized buffer with caller-specified strides.
rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width,
                                                  int height,
                                                  int stride_y,
                                                  int stride_u,
                                                  int stride_v) {
  return rtc::make_ref_counted<I420Buffer>(width, height, stride_y, stride_u,
                                           stride_v);
}

// static
// Deep-copies `source` by delegating to the raw-pointer overload below.
rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(
    const I420BufferInterface& source) {
  return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
              source.DataU(), source.StrideU(), source.DataV(),
              source.StrideV());
}

// static
rtc::scoped_refptr<I420Buffer> I420Buffer::Copy(int width,
                                                int height,
                                                const uint8_t* data_y,
                                                int stride_y,
                                                const uint8_t* data_u,
                                                int stride_u,
                                                const uint8_t* data_v,
                                                int stride_v) {
  // Note: May use different strides than the input data.
  rtc::scoped_refptr<I420Buffer> buffer = Create(width, height);
  RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y, data_u, stride_u, data_v,
                                   stride_v, buffer->MutableDataY(),
                                   buffer->StrideY(), buffer->MutableDataU(),
                                   buffer->StrideU(), buffer->MutableDataV(),
                                   buffer->StrideV(), width, height));
  return buffer;
}
|
||||
|
||||
// static
// Returns a rotated deep copy of `src`. The source planes must be non-null.
rtc::scoped_refptr<I420Buffer> I420Buffer::Rotate(
    const I420BufferInterface& src,
    VideoRotation rotation) {
  RTC_CHECK(src.DataY());
  RTC_CHECK(src.DataU());
  RTC_CHECK(src.DataV());

  // A 90 or 270 degree rotation swaps the output dimensions.
  int rotated_width = src.width();
  int rotated_height = src.height();
  if (rotation == webrtc::kVideoRotation_90 ||
      rotation == webrtc::kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      I420Buffer::Create(rotated_width, rotated_height);

  // VideoRotation values are degrees, matching libyuv::RotationMode.
  RTC_CHECK_EQ(0,
               libyuv::I420Rotate(
                   src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
                   src.DataV(), src.StrideV(), buffer->MutableDataY(),
                   buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
                   buffer->MutableDataV(), buffer->StrideV(), src.width(),
                   src.height(), static_cast<libyuv::RotationMode>(rotation)));

  return buffer;
}
|
||||
|
||||
// Zero-fills the entire allocation (all three planes including stride
// padding). See the header for why this exists (memory-checker/ffmpeg
// workarounds).
void I420Buffer::InitializeData() {
  memset(data_.get(), 0,
         I420DataSize(height_, stride_y_, stride_u_, stride_v_));
}
|
||||
|
||||
int I420Buffer::width() const {
  return width_;
}

int I420Buffer::height() const {
  return height_;
}

// All three planes live in the single `data_` allocation, laid out back to
// back: Y, then U, then V. The chroma planes have (height_ + 1) / 2 rows
// each (4:2:0 vertical subsampling).
const uint8_t* I420Buffer::DataY() const {
  return data_.get();
}
const uint8_t* I420Buffer::DataU() const {
  return data_.get() + stride_y_ * height_;
}
const uint8_t* I420Buffer::DataV() const {
  return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2);
}

int I420Buffer::StrideY() const {
  return stride_y_;
}
int I420Buffer::StrideU() const {
  return stride_u_;
}
int I420Buffer::StrideV() const {
  return stride_v_;
}

// The const_casts are safe: the allocation itself is writable; only the
// unique_ptr member is declared const.
uint8_t* I420Buffer::MutableDataY() {
  return const_cast<uint8_t*>(DataY());
}
uint8_t* I420Buffer::MutableDataU() {
  return const_cast<uint8_t*>(DataU());
}
uint8_t* I420Buffer::MutableDataV() {
  return const_cast<uint8_t*>(DataV());
}
|
||||
|
||||
// static
// Fills the whole buffer with black: Y = 0, U = V = 128 (neutral chroma).
void I420Buffer::SetBlack(I420Buffer* buffer) {
  RTC_CHECK(libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(),
                             buffer->MutableDataU(), buffer->StrideU(),
                             buffer->MutableDataV(), buffer->StrideV(), 0, 0,
                             buffer->width(), buffer->height(), 0, 128,
                             128) == 0);
}
|
||||
|
||||
// Scales the given crop rectangle of `src` to fill this buffer.
void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src,
                                  int offset_x,
                                  int offset_y,
                                  int crop_width,
                                  int crop_height) {
  // The crop rectangle must lie fully inside `src`.
  RTC_CHECK_LE(crop_width, src.width());
  RTC_CHECK_LE(crop_height, src.height());
  RTC_CHECK_LE(crop_width + offset_x, src.width());
  RTC_CHECK_LE(crop_height + offset_y, src.height());
  RTC_CHECK_GE(offset_x, 0);
  RTC_CHECK_GE(offset_y, 0);

  // Make sure offset is even so that u/v plane becomes aligned.
  // 4:2:0 is subsampled in both dimensions, so both offsets are rounded
  // down to even values and halved for the chroma planes.
  const int uv_offset_x = offset_x / 2;
  const int uv_offset_y = offset_y / 2;
  offset_x = uv_offset_x * 2;
  offset_y = uv_offset_y * 2;

  const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
  const uint8_t* u_plane =
      src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
  const uint8_t* v_plane =
      src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
  int res =
      libyuv::I420Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
                        src.StrideV(), crop_width, crop_height, MutableDataY(),
                        StrideY(), MutableDataU(), StrideU(), MutableDataV(),
                        StrideV(), width(), height(), libyuv::kFilterBox);

  RTC_DCHECK_EQ(res, 0);
}

// Center-crops `src` to this buffer's aspect ratio, then scales the crop to
// fill this buffer (i.e. scale without distorting the aspect ratio).
void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src) {
  // Largest crop of `src` that matches this buffer's aspect ratio; guards
  // against division by zero for degenerate 0-sized destinations.
  const int crop_width =
      height() > 0 ? std::min(src.width(), width() * src.height() / height())
                   : src.width();
  const int crop_height =
      width() > 0 ? std::min(src.height(), height() * src.width() / width())
                  : src.height();

  CropAndScaleFrom(src, (src.width() - crop_width) / 2,
                   (src.height() - crop_height) / 2, crop_width, crop_height);
}

// Scales all of `src` into this buffer, with no cropping.
void I420Buffer::ScaleFrom(const I420BufferInterface& src) {
  CropAndScaleFrom(src, 0, 0, src.width(), src.height());
}
|
||||
|
||||
} // namespace webrtc
|
||||
118
TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.h
Normal file
118
TMessagesProj/jni/voip/webrtc/api/video/i420_buffer.h
Normal file
|
|
@ -0,0 +1,118 @@
|
|||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_I420_BUFFER_H_
|
||||
#define API_VIDEO_I420_BUFFER_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "rtc_base/memory/aligned_malloc.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Plain I420 (4:2:0) buffer in standard memory, backed by a single aligned
// allocation owned by this object.
class RTC_EXPORT I420Buffer : public I420BufferInterface {
 public:
  // Create an uninitialized buffer, with default (tightly packed) or
  // explicit strides.
  static rtc::scoped_refptr<I420Buffer> Create(int width, int height);
  static rtc::scoped_refptr<I420Buffer> Create(int width,
                                               int height,
                                               int stride_y,
                                               int stride_u,
                                               int stride_v);

  // Create a new buffer and copy the pixel data.
  static rtc::scoped_refptr<I420Buffer> Copy(const I420BufferInterface& buffer);
  // Deprecated.
  static rtc::scoped_refptr<I420Buffer> Copy(const VideoFrameBuffer& buffer) {
    return Copy(*buffer.GetI420());
  }

  // Create a new buffer and copy the given raw I420 planes.
  static rtc::scoped_refptr<I420Buffer> Copy(int width,
                                             int height,
                                             const uint8_t* data_y,
                                             int stride_y,
                                             const uint8_t* data_u,
                                             int stride_u,
                                             const uint8_t* data_v,
                                             int stride_v);

  // Returns a rotated copy of `src`.
  static rtc::scoped_refptr<I420Buffer> Rotate(const I420BufferInterface& src,
                                               VideoRotation rotation);
  // Deprecated.
  static rtc::scoped_refptr<I420Buffer> Rotate(const VideoFrameBuffer& src,
                                               VideoRotation rotation) {
    return Rotate(*src.GetI420(), rotation);
  }

  // Sets the buffer to all black.
  static void SetBlack(I420Buffer* buffer);

  // Sets all three planes to all zeros. Used to work around for
  // quirks in memory checkers
  // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
  // ffmpeg (http://crbug.com/390941).
  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
  // issues are resolved in a better way. Or in the mean time, use SetBlack.
  void InitializeData();

  int width() const override;
  int height() const override;
  const uint8_t* DataY() const override;
  const uint8_t* DataU() const override;
  const uint8_t* DataV() const override;

  int StrideY() const override;
  int StrideU() const override;
  int StrideV() const override;

  // Writable plane access; valid because this class owns its allocation.
  uint8_t* MutableDataY();
  uint8_t* MutableDataU();
  uint8_t* MutableDataV();

  // Scale the cropped area of `src` to the size of `this` buffer, and
  // write the result into `this`.
  void CropAndScaleFrom(const I420BufferInterface& src,
                        int offset_x,
                        int offset_y,
                        int crop_width,
                        int crop_height);

  // The common case of a center crop, when needed to adjust the
  // aspect ratio without distorting the image.
  void CropAndScaleFrom(const I420BufferInterface& src);

  // Scale all of `src` to the size of `this` buffer, with no cropping.
  void ScaleFrom(const I420BufferInterface& src);

 protected:
  I420Buffer(int width, int height);
  I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v);

  ~I420Buffer() override;

 private:
  const int width_;
  const int height_;
  const int stride_y_;
  const int stride_u_;
  const int stride_v_;
  // Single allocation holding the Y, U and V planes back to back.
  const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_I420_BUFFER_H_
|
||||
237
TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.cc
Normal file
237
TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.cc
Normal file
|
|
@ -0,0 +1,237 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "api/video/i422_buffer.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <utility>
|
||||
|
||||
#include "api/make_ref_counted.h"
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/planar_functions.h"
|
||||
#include "third_party/libyuv/include/libyuv/scale.h"
|
||||
|
||||
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
|
||||
static const int kBufferAlignment = 64;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
|
||||
// Total byte size of an I422 buffer. All three planes span the full image
// height (4:2:2 subsamples chroma horizontally only), so the size is simply
// the sum of the strides times the height.
int I422DataSize(int height, int stride_y, int stride_u, int stride_v) {
  const int strides_sum = stride_y + stride_u + stride_v;
  return strides_sum * height;
}
|
||||
} // namespace
|
||||
|
||||
// Default strides: Y is full width; U/V are half width (4:2:2 subsamples
// chroma horizontally only), rounded up for odd widths.
I422Buffer::I422Buffer(int width, int height)
    : I422Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {}

// Allocates one contiguous, 64-byte-aligned block holding the Y, U and V
// planes back to back. Strides may exceed the minimum to add row padding.
I422Buffer::I422Buffer(int width,
                       int height,
                       int stride_y,
                       int stride_u,
                       int stride_v)
    : width_(width),
      height_(height),
      stride_y_(stride_y),
      stride_u_(stride_u),
      stride_v_(stride_v),
      data_(static_cast<uint8_t*>(
          AlignedMalloc(I422DataSize(height, stride_y, stride_u, stride_v),
                        kBufferAlignment))) {
  RTC_DCHECK_GT(width, 0);
  RTC_DCHECK_GT(height, 0);
  RTC_DCHECK_GE(stride_y, width);
  // Chroma strides must cover at least half the width (rounded up).
  RTC_DCHECK_GE(stride_u, (width + 1) / 2);
  RTC_DCHECK_GE(stride_v, (width + 1) / 2);
}

I422Buffer::~I422Buffer() {}
|
||||
|
||||
// static
// Creates an uninitialized buffer with default (tightly packed) strides.
rtc::scoped_refptr<I422Buffer> I422Buffer::Create(int width, int height) {
  return rtc::make_ref_counted<I422Buffer>(width, height);
}

// static
// Creates an uninitialized buffer with caller-specified strides.
rtc::scoped_refptr<I422Buffer> I422Buffer::Create(int width,
                                                  int height,
                                                  int stride_y,
                                                  int stride_u,
                                                  int stride_v) {
  return rtc::make_ref_counted<I422Buffer>(width, height, stride_y, stride_u,
                                           stride_v);
}
|
||||
|
||||
// static
// Deep-copies `source` into a newly allocated buffer (strides may differ).
rtc::scoped_refptr<I422Buffer> I422Buffer::Copy(
    const I422BufferInterface& source) {
  return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
              source.DataU(), source.StrideU(), source.DataV(),
              source.StrideV());
}

// static
// Converts an I420 (4:2:0) source into a new I422 (4:2:2) buffer.
rtc::scoped_refptr<I422Buffer> I422Buffer::Copy(
    const I420BufferInterface& source) {
  const int width = source.width();
  const int height = source.height();
  rtc::scoped_refptr<I422Buffer> buffer = Create(width, height);
  int res = libyuv::I420ToI422(
      source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
      source.DataV(), source.StrideV(), buffer->MutableDataY(),
      buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
      buffer->MutableDataV(), buffer->StrideV(), width, height);
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}

// static
// Deep-copies the given raw I422 planes into a newly allocated buffer.
rtc::scoped_refptr<I422Buffer> I422Buffer::Copy(int width,
                                                int height,
                                                const uint8_t* data_y,
                                                int stride_y,
                                                const uint8_t* data_u,
                                                int stride_u,
                                                const uint8_t* data_v,
                                                int stride_v) {
  // Note: May use different strides than the input data.
  rtc::scoped_refptr<I422Buffer> buffer = Create(width, height);
  int res = libyuv::I422Copy(data_y, stride_y, data_u, stride_u, data_v,
                             stride_v, buffer->MutableDataY(),
                             buffer->StrideY(), buffer->MutableDataU(),
                             buffer->StrideU(), buffer->MutableDataV(),
                             buffer->StrideV(), width, height);
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}
|
||||
|
||||
// static
// Returns a rotated deep copy of `src`. 90/270-degree rotations swap the
// output dimensions.
rtc::scoped_refptr<I422Buffer> I422Buffer::Rotate(
    const I422BufferInterface& src,
    VideoRotation rotation) {
  RTC_CHECK(src.DataY());
  RTC_CHECK(src.DataU());
  RTC_CHECK(src.DataV());

  int rotated_width = src.width();
  int rotated_height = src.height();
  if (rotation == webrtc::kVideoRotation_90 ||
      rotation == webrtc::kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::I422Buffer> buffer =
      I422Buffer::Create(rotated_width, rotated_height);

  // The cast assumes VideoRotation and libyuv::RotationMode share numeric
  // values (0/90/180/270).
  int res = libyuv::I422Rotate(
      src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), src.DataV(),
      src.StrideV(), buffer->MutableDataY(), buffer->StrideY(),
      buffer->MutableDataU(), buffer->StrideU(), buffer->MutableDataV(),
      buffer->StrideV(), src.width(), src.height(),
      static_cast<libyuv::RotationMode>(rotation));
  RTC_DCHECK_EQ(res, 0);

  return buffer;
}
|
||||
|
||||
// Converts to a new I420 buffer (downsamples chroma vertically).
rtc::scoped_refptr<I420BufferInterface> I422Buffer::ToI420() {
  rtc::scoped_refptr<I420Buffer> i420_buffer =
      I420Buffer::Create(width(), height());
  int res = libyuv::I422ToI420(
      DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
      i420_buffer->MutableDataY(), i420_buffer->StrideY(),
      i420_buffer->MutableDataU(), i420_buffer->StrideU(),
      i420_buffer->MutableDataV(), i420_buffer->StrideV(), width(), height());
  RTC_DCHECK_EQ(res, 0);

  return i420_buffer;
}

// Zero-fills all three planes, including any stride padding.
void I422Buffer::InitializeData() {
  memset(data_.get(), 0,
         I422DataSize(height_, stride_y_, stride_u_, stride_v_));
}
|
||||
|
||||
int I422Buffer::width() const {
  return width_;
}

int I422Buffer::height() const {
  return height_;
}

// The planes live back to back in the single allocation: Y, then U, then V.
const uint8_t* I422Buffer::DataY() const {
  return data_.get();
}
const uint8_t* I422Buffer::DataU() const {
  return data_.get() + stride_y_ * height_;
}
const uint8_t* I422Buffer::DataV() const {
  return data_.get() + stride_y_ * height_ + stride_u_ * height_;
}

int I422Buffer::StrideY() const {
  return stride_y_;
}
int I422Buffer::StrideU() const {
  return stride_u_;
}
int I422Buffer::StrideV() const {
  return stride_v_;
}

// The const_cast is safe: this class always owns a writable allocation;
// the getters only add const.
uint8_t* I422Buffer::MutableDataY() {
  return const_cast<uint8_t*>(DataY());
}
uint8_t* I422Buffer::MutableDataU() {
  return const_cast<uint8_t*>(DataU());
}
uint8_t* I422Buffer::MutableDataV() {
  return const_cast<uint8_t*>(DataV());
}
|
||||
|
||||
// Crops `src` to the given rectangle and scales the result to fill `this`.
void I422Buffer::CropAndScaleFrom(const I422BufferInterface& src,
                                  int offset_x,
                                  int offset_y,
                                  int crop_width,
                                  int crop_height) {
  RTC_CHECK_LE(crop_width, src.width());
  RTC_CHECK_LE(crop_height, src.height());
  RTC_CHECK_LE(crop_width + offset_x, src.width());
  RTC_CHECK_LE(crop_height + offset_y, src.height());
  RTC_CHECK_GE(offset_x, 0);
  RTC_CHECK_GE(offset_y, 0);

  // Round the horizontal offset down to even so the u/v planes stay aligned
  // with the luma. I422 subsamples chroma horizontally only, so the vertical
  // offset needs no adjustment.
  const int uv_offset_x = offset_x / 2;
  const int uv_offset_y = offset_y;
  offset_x = uv_offset_x * 2;

  const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
  const uint8_t* u_plane =
      src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
  const uint8_t* v_plane =
      src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;

  int res =
      libyuv::I422Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
                        src.StrideV(), crop_width, crop_height, MutableDataY(),
                        StrideY(), MutableDataU(), StrideU(), MutableDataV(),
                        StrideV(), width(), height(), libyuv::kFilterBox);
  RTC_DCHECK_EQ(res, 0);
}
|
||||
|
||||
} // namespace webrtc
|
||||
114
TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.h
Normal file
114
TMessagesProj/jni/voip/webrtc/api/video/i422_buffer.h
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_I422_BUFFER_H_
|
||||
#define API_VIDEO_I422_BUFFER_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "rtc_base/memory/aligned_malloc.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Plain I422 buffer in standard memory. I422 is a YUV format with chroma
// subsampled horizontally but not vertically (4:2:2).
// https://en.wikipedia.org/wiki/Chroma_subsampling#4:2:2
class RTC_EXPORT I422Buffer : public I422BufferInterface {
 public:
  static rtc::scoped_refptr<I422Buffer> Create(int width, int height);
  static rtc::scoped_refptr<I422Buffer> Create(int width,
                                               int height,
                                               int stride_y,
                                               int stride_u,
                                               int stride_v);

  // Create a new buffer and copy the pixel data.
  static rtc::scoped_refptr<I422Buffer> Copy(const I422BufferInterface& buffer);
  // Convert an I420 buffer into a new I422 buffer.
  static rtc::scoped_refptr<I422Buffer> Copy(const I420BufferInterface& buffer);

  // Create a new buffer and copy the given raw I422 planes.
  static rtc::scoped_refptr<I422Buffer> Copy(int width,
                                             int height,
                                             const uint8_t* data_y,
                                             int stride_y,
                                             const uint8_t* data_u,
                                             int stride_u,
                                             const uint8_t* data_v,
                                             int stride_v);

  // Returns a rotated copy of `src`.
  static rtc::scoped_refptr<I422Buffer> Rotate(const I422BufferInterface& src,
                                               VideoRotation rotation);

  // Converts to a new I420 buffer. GetI420 returns nullptr because this
  // buffer is not itself in I420 format.
  rtc::scoped_refptr<I420BufferInterface> ToI420() final;
  const I420BufferInterface* GetI420() const final { return nullptr; }

  // Sets the buffer to all black.
  static void SetBlack(I422Buffer* buffer);

  // Sets all three planes to all zeros. Used to work around for
  // quirks in memory checkers
  // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
  // ffmpeg (http://crbug.com/390941).
  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
  // issues are resolved in a better way. Or in the mean time, use SetBlack.
  void InitializeData();

  int width() const override;
  int height() const override;
  const uint8_t* DataY() const override;
  const uint8_t* DataU() const override;
  const uint8_t* DataV() const override;

  int StrideY() const override;
  int StrideU() const override;
  int StrideV() const override;

  // Writable plane access; valid because this class owns its allocation.
  uint8_t* MutableDataY();
  uint8_t* MutableDataU();
  uint8_t* MutableDataV();

  // Scale the cropped area of `src` to the size of `this` buffer, and
  // write the result into `this`.
  void CropAndScaleFrom(const I422BufferInterface& src,
                        int offset_x,
                        int offset_y,
                        int crop_width,
                        int crop_height);

  // The common case of a center crop, when needed to adjust the
  // aspect ratio without distorting the image.
  void CropAndScaleFrom(const I422BufferInterface& src);

  // Scale all of `src` to the size of `this` buffer, with no cropping.
  void ScaleFrom(const I422BufferInterface& src);

 protected:
  I422Buffer(int width, int height);
  I422Buffer(int width, int height, int stride_y, int stride_u, int stride_v);

  ~I422Buffer() override;

 private:
  const int width_;
  const int height_;
  const int stride_y_;
  const int stride_u_;
  const int stride_v_;
  // Single allocation holding the Y, U and V planes back to back.
  const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_I422_BUFFER_H_
|
||||
211
TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc
Normal file
211
TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "api/video/i444_buffer.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
#include <algorithm>
|
||||
#include <utility>
|
||||
|
||||
#include "api/make_ref_counted.h"
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/planar_functions.h"
|
||||
#include "third_party/libyuv/include/libyuv/scale.h"
|
||||
|
||||
// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
|
||||
static const int kBufferAlignment = 64;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
|
||||
// Total byte size of an I444 buffer: three full-resolution planes, each
// covering the full image height.
int I444DataSize(int height, int stride_y, int stride_u, int stride_v) {
  int size = stride_y * height;
  size += stride_u * height;
  size += stride_v * height;
  return size;
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// Default strides: all three planes are full width (4:4:4, no chroma
// subsampling).
I444Buffer::I444Buffer(int width, int height)
    : I444Buffer(width, height, width, (width), (width)) {}

// Allocates one contiguous, 64-byte-aligned block holding the Y, U and V
// planes back to back. Strides may exceed the minimum to add row padding.
I444Buffer::I444Buffer(int width,
                       int height,
                       int stride_y,
                       int stride_u,
                       int stride_v)
    : width_(width),
      height_(height),
      stride_y_(stride_y),
      stride_u_(stride_u),
      stride_v_(stride_v),
      data_(static_cast<uint8_t*>(
          AlignedMalloc(I444DataSize(height, stride_y, stride_u, stride_v),
                        kBufferAlignment))) {
  RTC_DCHECK_GT(width, 0);
  RTC_DCHECK_GT(height, 0);
  RTC_DCHECK_GE(stride_y, width);
  // Chroma planes are full resolution in I444.
  RTC_DCHECK_GE(stride_u, (width));
  RTC_DCHECK_GE(stride_v, (width));
}

I444Buffer::~I444Buffer() {}
|
||||
|
||||
// static
// Creates an uninitialized buffer with default (tightly packed) strides.
rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width, int height) {
  return rtc::make_ref_counted<I444Buffer>(width, height);
}

// static
// Creates an uninitialized buffer with caller-specified strides.
rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width,
                                                  int height,
                                                  int stride_y,
                                                  int stride_u,
                                                  int stride_v) {
  return rtc::make_ref_counted<I444Buffer>(width, height, stride_y, stride_u,
                                           stride_v);
}
|
||||
|
||||
// static
// Deep-copies `source` into a newly allocated buffer (strides may differ).
rtc::scoped_refptr<I444Buffer> I444Buffer::Copy(
    const I444BufferInterface& source) {
  return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
              source.DataU(), source.StrideU(), source.DataV(),
              source.StrideV());
}

// static
// Deep-copies the given raw I444 planes into a newly allocated buffer.
rtc::scoped_refptr<I444Buffer> I444Buffer::Copy(int width,
                                                int height,
                                                const uint8_t* data_y,
                                                int stride_y,
                                                const uint8_t* data_u,
                                                int stride_u,
                                                const uint8_t* data_v,
                                                int stride_v) {
  // Note: May use different strides than the input data.
  rtc::scoped_refptr<I444Buffer> buffer = Create(width, height);
  RTC_CHECK_EQ(0, libyuv::I444Copy(data_y, stride_y, data_u, stride_u, data_v,
                                   stride_v, buffer->MutableDataY(),
                                   buffer->StrideY(), buffer->MutableDataU(),
                                   buffer->StrideU(), buffer->MutableDataV(),
                                   buffer->StrideV(), width, height));
  return buffer;
}
|
||||
|
||||
// static
// Returns a rotated deep copy of `src`. 90/270-degree rotations swap the
// output dimensions.
rtc::scoped_refptr<I444Buffer> I444Buffer::Rotate(
    const I444BufferInterface& src,
    VideoRotation rotation) {
  RTC_CHECK(src.DataY());
  RTC_CHECK(src.DataU());
  RTC_CHECK(src.DataV());

  int rotated_width = src.width();
  int rotated_height = src.height();
  if (rotation == webrtc::kVideoRotation_90 ||
      rotation == webrtc::kVideoRotation_270) {
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::I444Buffer> buffer =
      I444Buffer::Create(rotated_width, rotated_height);

  // The cast assumes VideoRotation and libyuv::RotationMode share numeric
  // values (0/90/180/270).
  RTC_CHECK_EQ(0,
               libyuv::I444Rotate(
                   src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
                   src.DataV(), src.StrideV(), buffer->MutableDataY(),
                   buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
                   buffer->MutableDataV(), buffer->StrideV(), src.width(),
                   src.height(), static_cast<libyuv::RotationMode>(rotation)));

  return buffer;
}
|
||||
|
||||
rtc::scoped_refptr<I420BufferInterface> I444Buffer::ToI420() {
|
||||
rtc::scoped_refptr<I420Buffer> i420_buffer =
|
||||
I420Buffer::Create(width(), height());
|
||||
libyuv::I444ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
|
||||
i420_buffer->MutableDataY(), i420_buffer->StrideY(),
|
||||
i420_buffer->MutableDataU(), i420_buffer->StrideU(),
|
||||
i420_buffer->MutableDataV(), i420_buffer->StrideV(),
|
||||
width(), height());
|
||||
return i420_buffer;
|
||||
}
|
||||
|
||||
void I444Buffer::InitializeData() {
|
||||
memset(data_.get(), 0,
|
||||
I444DataSize(height_, stride_y_, stride_u_, stride_v_));
|
||||
}
|
||||
|
||||
int I444Buffer::width() const {
  return width_;
}

int I444Buffer::height() const {
  return height_;
}

// The planes live back to back in the single allocation: Y, then U, then V.
const uint8_t* I444Buffer::DataY() const {
  return data_.get();
}
const uint8_t* I444Buffer::DataU() const {
  return data_.get() + stride_y_ * height_;
}
const uint8_t* I444Buffer::DataV() const {
  return data_.get() + stride_y_ * height_ + stride_u_ * ((height_));
}

int I444Buffer::StrideY() const {
  return stride_y_;
}
int I444Buffer::StrideU() const {
  return stride_u_;
}
int I444Buffer::StrideV() const {
  return stride_v_;
}

// The const_cast is safe: this class always owns a writable allocation;
// the getters only add const.
uint8_t* I444Buffer::MutableDataY() {
  return const_cast<uint8_t*>(DataY());
}
uint8_t* I444Buffer::MutableDataU() {
  return const_cast<uint8_t*>(DataU());
}
uint8_t* I444Buffer::MutableDataV() {
  return const_cast<uint8_t*>(DataV());
}
|
||||
|
||||
// Crops `src` to the given rectangle and scales the result to fill `this`.
// I444 has no chroma subsampling, so the offsets need no even alignment and
// apply identically to all three planes.
void I444Buffer::CropAndScaleFrom(const I444BufferInterface& src,
                                  int offset_x,
                                  int offset_y,
                                  int crop_width,
                                  int crop_height) {
  RTC_CHECK_LE(crop_width, src.width());
  RTC_CHECK_LE(crop_height, src.height());
  RTC_CHECK_LE(crop_width + offset_x, src.width());
  RTC_CHECK_LE(crop_height + offset_y, src.height());
  RTC_CHECK_GE(offset_x, 0);
  RTC_CHECK_GE(offset_y, 0);

  const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
  const uint8_t* u_plane = src.DataU() + src.StrideU() * offset_y + offset_x;
  const uint8_t* v_plane = src.DataV() + src.StrideV() * offset_y + offset_x;
  int res =
      libyuv::I444Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
                        src.StrideV(), crop_width, crop_height, MutableDataY(),
                        StrideY(), MutableDataU(), StrideU(), MutableDataV(),
                        StrideV(), width(), height(), libyuv::kFilterBox);

  RTC_DCHECK_EQ(res, 0);
}
|
||||
|
||||
} // namespace webrtc
|
||||
104
TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h
Normal file
104
TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_I444_BUFFER_H_
|
||||
#define API_VIDEO_I444_BUFFER_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "rtc_base/memory/aligned_malloc.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Plain I444 buffer in standard memory.
// I444 represents an image in YUV format without any chroma subsampling.
// https://en.wikipedia.org/wiki/Chroma_subsampling#4:4:4
class RTC_EXPORT I444Buffer : public I444BufferInterface {
 public:
  static rtc::scoped_refptr<I444Buffer> Create(int width, int height);
  static rtc::scoped_refptr<I444Buffer> Create(int width,
                                               int height,
                                               int stride_y,
                                               int stride_u,
                                               int stride_v);

  // Create a new buffer and copy the pixel data.
  static rtc::scoped_refptr<I444Buffer> Copy(const I444BufferInterface& buffer);

  // Create a new buffer and copy the given raw I444 planes.
  static rtc::scoped_refptr<I444Buffer> Copy(int width,
                                             int height,
                                             const uint8_t* data_y,
                                             int stride_y,
                                             const uint8_t* data_u,
                                             int stride_u,
                                             const uint8_t* data_v,
                                             int stride_v);

  // Returns a rotated copy of `src`.
  static rtc::scoped_refptr<I444Buffer> Rotate(const I444BufferInterface& src,
                                               VideoRotation rotation);

  // Converts to a new I420 buffer. GetI420 returns nullptr because this
  // buffer is not itself in I420 format.
  rtc::scoped_refptr<I420BufferInterface> ToI420() final;
  const I420BufferInterface* GetI420() const final { return nullptr; }

  // Sets all three planes to all zeros. Used to work around for
  // quirks in memory checkers
  // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
  // ffmpeg (http://crbug.com/390941).
  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
  // issues are resolved in a better way. Or in the mean time, use SetBlack.
  void InitializeData();

  int width() const override;
  int height() const override;
  const uint8_t* DataY() const override;
  const uint8_t* DataU() const override;
  const uint8_t* DataV() const override;

  int StrideY() const override;
  int StrideU() const override;
  int StrideV() const override;

  // Writable plane access; valid because this class owns its allocation.
  uint8_t* MutableDataY();
  uint8_t* MutableDataU();
  uint8_t* MutableDataV();

  // Scale the cropped area of `src` to the size of `this` buffer, and
  // write the result into `this`.
  void CropAndScaleFrom(const I444BufferInterface& src,
                        int offset_x,
                        int offset_y,
                        int crop_width,
                        int crop_height);

 protected:
  I444Buffer(int width, int height);
  I444Buffer(int width, int height, int stride_y, int stride_u, int stride_v);

  ~I444Buffer() override;

 private:
  const int width_;
  const int height_;
  const int stride_y_;
  const int stride_u_;
  const int stride_v_;
  // Single allocation holding the Y, U and V planes back to back.
  const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_I444_BUFFER_H_
|
||||
155
TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc
Normal file
155
TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.cc
Normal file
|
|
@ -0,0 +1,155 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/nv12_buffer.h"
|
||||
|
||||
#include "api/make_ref_counted.h"
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/scale.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
|
||||
static const int kBufferAlignment = 64;
|
||||
|
||||
// Total byte size of an NV12 buffer: a full-height Y plane followed by an
// interleaved UV plane covering half the rows (rounded up for odd heights).
int NV12DataSize(int height, int stride_y, int stride_uv) {
  const int uv_rows = (height + 1) / 2;
  return stride_y * height + stride_uv * uv_rows;
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// Default strides: Y is full width; the interleaved UV plane is the width
// rounded up to an even number of bytes.
NV12Buffer::NV12Buffer(int width, int height)
    : NV12Buffer(width, height, width, width + width % 2) {}

// Allocates one contiguous, 64-byte-aligned block holding the Y plane
// followed by the interleaved UV plane.
NV12Buffer::NV12Buffer(int width, int height, int stride_y, int stride_uv)
    : width_(width),
      height_(height),
      stride_y_(stride_y),
      stride_uv_(stride_uv),
      data_(static_cast<uint8_t*>(
          AlignedMalloc(NV12DataSize(height_, stride_y_, stride_uv),
                        kBufferAlignment))) {
  RTC_DCHECK_GT(width, 0);
  RTC_DCHECK_GT(height, 0);
  RTC_DCHECK_GE(stride_y, width);
  // UV stride must cover a full row of interleaved chroma pairs.
  RTC_DCHECK_GE(stride_uv, (width + width % 2));
}

NV12Buffer::~NV12Buffer() = default;
|
||||
|
||||
// static
// Creates an uninitialized buffer with default (tightly packed) strides.
rtc::scoped_refptr<NV12Buffer> NV12Buffer::Create(int width, int height) {
  return rtc::make_ref_counted<NV12Buffer>(width, height);
}

// static
// Creates an uninitialized buffer with caller-specified strides.
rtc::scoped_refptr<NV12Buffer> NV12Buffer::Create(int width,
                                                  int height,
                                                  int stride_y,
                                                  int stride_uv) {
  return rtc::make_ref_counted<NV12Buffer>(width, height, stride_y, stride_uv);
}
|
||||
|
||||
// static
// Converts an I420 source into a new NV12 buffer (interleaves U and V).
rtc::scoped_refptr<NV12Buffer> NV12Buffer::Copy(
    const I420BufferInterface& i420_buffer) {
  rtc::scoped_refptr<NV12Buffer> buffer =
      NV12Buffer::Create(i420_buffer.width(), i420_buffer.height());
  libyuv::I420ToNV12(
      i420_buffer.DataY(), i420_buffer.StrideY(), i420_buffer.DataU(),
      i420_buffer.StrideU(), i420_buffer.DataV(), i420_buffer.StrideV(),
      buffer->MutableDataY(), buffer->StrideY(), buffer->MutableDataUV(),
      buffer->StrideUV(), buffer->width(), buffer->height());
  return buffer;
}
|
||||
|
||||
// Converts to a new I420 buffer (de-interleaves the UV plane).
rtc::scoped_refptr<I420BufferInterface> NV12Buffer::ToI420() {
  rtc::scoped_refptr<I420Buffer> i420_buffer =
      I420Buffer::Create(width(), height());
  libyuv::NV12ToI420(DataY(), StrideY(), DataUV(), StrideUV(),
                     i420_buffer->MutableDataY(), i420_buffer->StrideY(),
                     i420_buffer->MutableDataU(), i420_buffer->StrideU(),
                     i420_buffer->MutableDataV(), i420_buffer->StrideV(),
                     width(), height());
  return i420_buffer;
}
|
||||
|
||||
int NV12Buffer::width() const {
|
||||
return width_;
|
||||
}
|
||||
int NV12Buffer::height() const {
|
||||
return height_;
|
||||
}
|
||||
|
||||
int NV12Buffer::StrideY() const {
|
||||
return stride_y_;
|
||||
}
|
||||
int NV12Buffer::StrideUV() const {
|
||||
return stride_uv_;
|
||||
}
|
||||
|
||||
const uint8_t* NV12Buffer::DataY() const {
|
||||
return data_.get();
|
||||
}
|
||||
|
||||
const uint8_t* NV12Buffer::DataUV() const {
|
||||
return data_.get() + UVOffset();
|
||||
}
|
||||
|
||||
uint8_t* NV12Buffer::MutableDataY() {
|
||||
return data_.get();
|
||||
}
|
||||
|
||||
uint8_t* NV12Buffer::MutableDataUV() {
|
||||
return data_.get() + UVOffset();
|
||||
}
|
||||
|
||||
size_t NV12Buffer::UVOffset() const {
|
||||
return stride_y_ * height_;
|
||||
}
|
||||
|
||||
// Zero-fills both planes. Exists to pacify memory checkers and ffmpeg quirks
// (see the declaration in nv12_buffer.h for the tracking bugs).
void NV12Buffer::InitializeData() {
  memset(data_.get(), 0, NV12DataSize(height_, stride_y_, stride_uv_));
}
|
||||
|
||||
// Crops the rectangle (offset_x, offset_y, crop_width, crop_height) out of
// `src` and scales it to fill this buffer, using libyuv's box filter.
// Odd offsets are rounded down to keep the half-resolution UV plane aligned
// with the Y plane.
void NV12Buffer::CropAndScaleFrom(const NV12BufferInterface& src,
                                  int offset_x,
                                  int offset_y,
                                  int crop_width,
                                  int crop_height) {
  // The crop rectangle must lie entirely within the source.
  RTC_CHECK_LE(crop_width, src.width());
  RTC_CHECK_LE(crop_height, src.height());
  RTC_CHECK_LE(crop_width + offset_x, src.width());
  RTC_CHECK_LE(crop_height + offset_y, src.height());
  RTC_CHECK_GE(offset_x, 0);
  RTC_CHECK_GE(offset_y, 0);

  // Make sure offset is even so that u/v plane becomes aligned.
  const int uv_offset_x = offset_x / 2;
  const int uv_offset_y = offset_y / 2;
  offset_x = uv_offset_x * 2;
  offset_y = uv_offset_y * 2;

  // Pointers to the top-left of the crop in each plane. The UV plane is
  // subsampled 2x in both dimensions and interleaved, hence the `* 2` on the
  // horizontal UV offset.
  const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
  const uint8_t* uv_plane =
      src.DataUV() + src.StrideUV() * uv_offset_y + uv_offset_x * 2;

  int res = libyuv::NV12Scale(y_plane, src.StrideY(), uv_plane, src.StrideUV(),
                              crop_width, crop_height, MutableDataY(),
                              StrideY(), MutableDataUV(), StrideUV(), width(),
                              height(), libyuv::kFilterBox);

  RTC_DCHECK_EQ(res, 0);
}
|
||||
|
||||
} // namespace webrtc
|
||||
85
TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h
Normal file
85
TMessagesProj/jni/voip/webrtc/api/video/nv12_buffer.h
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
/*
 * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef API_VIDEO_NV12_BUFFER_H_
#define API_VIDEO_NV12_BUFFER_H_

#include <memory>
#include <utility>

#include "api/scoped_refptr.h"
#include "api/video/video_frame_buffer.h"
#include "rtc_base/memory/aligned_malloc.h"
#include "rtc_base/system/rtc_export.h"

namespace webrtc {

// NV12 is a biplanar encoding format, with full-resolution Y and
// half-resolution interleved UV. More information can be found at
// http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12.
//
// Both planes live in a single aligned allocation (`data_`); the UV plane
// starts at UVOffset() bytes from the beginning.
class RTC_EXPORT NV12Buffer : public NV12BufferInterface {
 public:
  // Creates a buffer with default (tightly packed) strides.
  static rtc::scoped_refptr<NV12Buffer> Create(int width, int height);
  // Creates a buffer with explicit strides, e.g. for padded layouts.
  static rtc::scoped_refptr<NV12Buffer> Create(int width,
                                               int height,
                                               int stride_y,
                                               int stride_uv);
  // Deep-copies an I420 buffer, converting it to NV12.
  static rtc::scoped_refptr<NV12Buffer> Copy(
      const I420BufferInterface& i420_buffer);

  rtc::scoped_refptr<I420BufferInterface> ToI420() override;

  int width() const override;
  int height() const override;

  int StrideY() const override;
  int StrideUV() const override;

  const uint8_t* DataY() const override;
  const uint8_t* DataUV() const override;

  uint8_t* MutableDataY();
  uint8_t* MutableDataUV();

  // Sets all three planes to all zeros. Used to work around for
  // quirks in memory checkers
  // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
  // ffmpeg (http://crbug.com/390941).
  // TODO(https://crbug.com/390941): Deprecated. Should be deleted if/when those
  // issues are resolved in a better way. Or in the mean time, use SetBlack.
  void InitializeData();

  // Scale the cropped area of `src` to the size of `this` buffer, and
  // write the result into `this`.
  void CropAndScaleFrom(const NV12BufferInterface& src,
                        int offset_x,
                        int offset_y,
                        int crop_width,
                        int crop_height);

 protected:
  NV12Buffer(int width, int height);
  NV12Buffer(int width, int height, int stride_y, int stride_uv);

  ~NV12Buffer() override;

 private:
  // Byte offset of the UV plane within `data_`.
  size_t UVOffset() const;

  const int width_;
  const int height_;
  const int stride_y_;
  const int stride_uv_;
  // Single allocation holding the Y plane followed by the interleaved UV
  // plane; released via AlignedFree.
  const std::unique_ptr<uint8_t, AlignedFreeDeleter> data_;
};

}  // namespace webrtc

#endif  // API_VIDEO_NV12_BUFFER_H_
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
/*
|
||||
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_RECORDABLE_ENCODED_FRAME_H_
|
||||
#define API_VIDEO_RECORDABLE_ENCODED_FRAME_H_
|
||||
|
||||
#include "api/array_view.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/units/timestamp.h"
|
||||
#include "api/video/color_space.h"
|
||||
#include "api/video/encoded_image.h"
|
||||
#include "api/video/video_codec_type.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Interface for accessing recordable elements of an encoded frame.
|
||||
// Interface for accessing recordable elements of an encoded frame.
class RecordableEncodedFrame {
 public:
  // Encoded resolution in pixels
  // TODO(bugs.webrtc.org/12114) : remove in favor of Resolution.
  struct EncodedResolution {
    // True when no resolution information is available (both dimensions 0).
    bool empty() const { return width == 0 && height == 0; }

    unsigned width = 0;
    unsigned height = 0;
  };

  virtual ~RecordableEncodedFrame() = default;

  // Provides access to encoded data
  virtual rtc::scoped_refptr<const EncodedImageBufferInterface> encoded_buffer()
      const = 0;

  // Optionally returns the colorspace of the encoded frame. This can differ
  // from the eventually decoded frame's colorspace.
  virtual absl::optional<webrtc::ColorSpace> color_space() const = 0;

  // Returns the codec of the encoded frame
  virtual VideoCodecType codec() const = 0;

  // Returns whether the encoded frame is a key frame
  virtual bool is_key_frame() const = 0;

  // Returns the frame's encoded resolution. May be 0x0 if the frame
  // doesn't contain resolution information
  virtual EncodedResolution resolution() const = 0;

  // Returns the computed render time
  virtual Timestamp render_time() const = 0;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_RECORDABLE_ENCODED_FRAME_H_
|
||||
46
TMessagesProj/jni/voip/webrtc/api/video/render_resolution.h
Normal file
46
TMessagesProj/jni/voip/webrtc/api/video/render_resolution.h
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_RENDER_RESOLUTION_H_
|
||||
#define API_VIDEO_RENDER_RESOLUTION_H_
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// TODO(bugs.webrtc.org/12114) : remove in favor of Resolution.
|
||||
// Immutable-style value type holding a render width/height pair.
// Default-constructed instances are 0x0 and therefore not Valid().
// TODO(bugs.webrtc.org/12114) : remove in favor of Resolution.
class RenderResolution {
 public:
  constexpr RenderResolution() = default;
  constexpr RenderResolution(int width, int height)
      : width_(width), height_(height) {}
  RenderResolution(const RenderResolution&) = default;
  RenderResolution& operator=(const RenderResolution&) = default;

  // Two resolutions are equal iff both dimensions match.
  friend bool operator==(const RenderResolution& lhs,
                         const RenderResolution& rhs) {
    return lhs.width_ == rhs.width_ && lhs.height_ == rhs.height_;
  }
  friend bool operator!=(const RenderResolution& lhs,
                         const RenderResolution& rhs) {
    return !(lhs == rhs);
  }

  // A resolution is valid only when both dimensions are strictly positive.
  constexpr bool Valid() const { return Width() > 0 && Height() > 0; }

  constexpr int Width() const { return width_; }
  constexpr int Height() const { return height_; }

 private:
  int width_ = 0;
  int height_ = 0;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_RENDER_RESOLUTION_H_
|
||||
38
TMessagesProj/jni/voip/webrtc/api/video/resolution.h
Normal file
38
TMessagesProj/jni/voip/webrtc/api/video/resolution.h
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
/*
|
||||
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_RESOLUTION_H_
|
||||
#define API_VIDEO_RESOLUTION_H_
|
||||
|
||||
#include <utility>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// A struct representing a video resolution in pixels.
|
||||
struct Resolution {
|
||||
int width = 0;
|
||||
int height = 0;
|
||||
|
||||
// Helper methods.
|
||||
int PixelCount() const { return width * height; }
|
||||
std::pair<int, int> ToPair() const { return std::make_pair(width, height); }
|
||||
};
|
||||
|
||||
inline bool operator==(const Resolution& lhs, const Resolution& rhs) {
|
||||
return lhs.width == rhs.width && lhs.height == rhs.height;
|
||||
}
|
||||
|
||||
inline bool operator!=(const Resolution& lhs, const Resolution& rhs) {
|
||||
return !(lhs == rhs);
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_RESOLUTION_H_
|
||||
|
|
@ -0,0 +1,344 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/rtp_video_frame_assembler.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <cstdint>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/container/inlined_vector.h"
|
||||
#include "absl/types/optional.h"
|
||||
#include "modules/rtp_rtcp/source/frame_object.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
|
||||
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.h"
|
||||
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_generic.h"
|
||||
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.h"
|
||||
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h"
|
||||
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.h"
|
||||
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.h"
|
||||
#include "modules/video_coding/packet_buffer.h"
|
||||
#include "modules/video_coding/rtp_frame_reference_finder.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/numerics/sequence_number_unwrapper.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
// Maps a PayloadFormat to the matching RTP depacketizer implementation.
// Returns nullptr (after tripping a DCHECK) for formats that have no
// depacketizer yet (currently H265).
std::unique_ptr<VideoRtpDepacketizer> CreateDepacketizer(
    RtpVideoFrameAssembler::PayloadFormat payload_format) {
  switch (payload_format) {
    case RtpVideoFrameAssembler::kRaw:
      return std::make_unique<VideoRtpDepacketizerRaw>();
    case RtpVideoFrameAssembler::kH264:
      return std::make_unique<VideoRtpDepacketizerH264>();
    case RtpVideoFrameAssembler::kVp8:
      return std::make_unique<VideoRtpDepacketizerVp8>();
    case RtpVideoFrameAssembler::kVp9:
      return std::make_unique<VideoRtpDepacketizerVp9>();
    case RtpVideoFrameAssembler::kAv1:
      return std::make_unique<VideoRtpDepacketizerAv1>();
    case RtpVideoFrameAssembler::kGeneric:
      return std::make_unique<VideoRtpDepacketizerGeneric>();
    case RtpVideoFrameAssembler::kH265:
      // TODO(bugs.webrtc.org/13485): Implement VideoRtpDepacketizerH265
      RTC_DCHECK_NOTREACHED();
      return nullptr;
  }
  // Unreachable if all enumerators are handled above; kept as a safety net
  // for out-of-range values.
  RTC_DCHECK_NOTREACHED();
  return nullptr;
}
|
||||
} // namespace
|
||||
|
||||
// Private implementation of RtpVideoFrameAssembler (pimpl). Owns the packet
// buffer, the depacketizer, and the reference finder, and wires them together
// in InsertPacket().
class RtpVideoFrameAssembler::Impl {
 public:
  explicit Impl(std::unique_ptr<VideoRtpDepacketizer> depacketizer);
  ~Impl() = default;

  // Feeds one received RTP packet through the pipeline; returns any frames
  // that became complete as a result.
  FrameVector InsertPacket(const RtpPacketReceived& packet);

 private:
  // Frames assembled from packets but whose references are not yet resolved.
  using RtpFrameVector =
      absl::InlinedVector<std::unique_ptr<RtpFrameObject>, 3>;

  // Turns contiguous packet runs from the packet buffer into RtpFrameObjects.
  RtpFrameVector AssembleFrames(
      video_coding::PacketBuffer::InsertResult insert_result);
  // Resolves frame dependencies and wraps complete frames for the caller.
  FrameVector FindReferences(RtpFrameVector frames);
  // Advances state on padding-only packets, which carry no payload but still
  // consume sequence numbers.
  FrameVector UpdateWithPadding(uint16_t seq_num);
  // Fills `video_header` from the dependency descriptor extension, if parseable.
  bool ParseDependenciesDescriptorExtension(const RtpPacketReceived& rtp_packet,
                                            RTPVideoHeader& video_header);
  // Fills `video_header` from the (older) generic frame descriptor extension.
  bool ParseGenericDescriptorExtension(const RtpPacketReceived& rtp_packet,
                                       RTPVideoHeader& video_header);
  // Drops buffered state older than a fixed window behind `incoming_seq_num`.
  void ClearOldData(uint16_t incoming_seq_num);

  // Frame dependency structure from the most recent key frame's dependency
  // descriptor; required to parse subsequent descriptors.
  std::unique_ptr<FrameDependencyStructure> video_structure_;
  // Unwraps 16-bit frame numbers into monotonically increasing 64-bit IDs.
  SeqNumUnwrapper<uint16_t> frame_id_unwrapper_;
  // Frame ID at which `video_structure_` was attached, used to reject stale
  // structures arriving out of order.
  absl::optional<int64_t> video_structure_frame_id_;
  std::unique_ptr<VideoRtpDepacketizer> depacketizer_;
  video_coding::PacketBuffer packet_buffer_;
  RtpFrameReferenceFinder reference_finder_;
};
|
||||
|
||||
// Takes ownership of the codec-specific depacketizer. The packet buffer is
// sized to a fixed 2048 packets (start == max, so it never grows).
RtpVideoFrameAssembler::Impl::Impl(
    std::unique_ptr<VideoRtpDepacketizer> depacketizer)
    : depacketizer_(std::move(depacketizer)),
      packet_buffer_(/*start_buffer_size=*/2048, /*max_buffer_size=*/2048) {}
|
||||
|
||||
// Pipeline for a single received packet:
//   1. Padding-only packets just advance the buffers.
//   2. Depacketize the payload; bail out on parse failure.
//   3. Populate the video header from whichever descriptor extension is
//      present (dependency descriptor takes precedence).
//   4. Insert into the packet buffer and resolve any frames that completed.
RtpVideoFrameAssembler::FrameVector RtpVideoFrameAssembler::Impl::InsertPacket(
    const RtpPacketReceived& rtp_packet) {
  if (rtp_packet.payload_size() == 0) {
    ClearOldData(rtp_packet.SequenceNumber());
    return UpdateWithPadding(rtp_packet.SequenceNumber());
  }

  absl::optional<VideoRtpDepacketizer::ParsedRtpPayload> parsed_payload =
      depacketizer_->Parse(rtp_packet.PayloadBuffer());

  if (parsed_payload == absl::nullopt) {
    return {};
  }

  if (rtp_packet.HasExtension<RtpDependencyDescriptorExtension>()) {
    if (!ParseDependenciesDescriptorExtension(rtp_packet,
                                              parsed_payload->video_header)) {
      return {};
    }
  } else if (rtp_packet.HasExtension<RtpGenericFrameDescriptorExtension00>()) {
    if (!ParseGenericDescriptorExtension(rtp_packet,
                                         parsed_payload->video_header)) {
      return {};
    }
  }

  // The RTP marker bit also signals end-of-frame, independent of the codec's
  // own last-packet signaling.
  parsed_payload->video_header.is_last_packet_in_frame |= rtp_packet.Marker();

  auto packet = std::make_unique<video_coding::PacketBuffer::Packet>(
      rtp_packet, parsed_payload->video_header);
  packet->video_payload = std::move(parsed_payload->video_payload);

  ClearOldData(rtp_packet.SequenceNumber());
  return FindReferences(
      AssembleFrames(packet_buffer_.InsertPacket(std::move(packet))));
}
|
||||
|
||||
// Evicts packet-buffer and reference-finder state more than
// kOldSeqNumThreshold sequence numbers behind the incoming packet.
// The subtraction intentionally wraps (uint16_t arithmetic).
void RtpVideoFrameAssembler::Impl::ClearOldData(uint16_t incoming_seq_num) {
  constexpr uint16_t kOldSeqNumThreshold = 2000;
  uint16_t old_seq_num = incoming_seq_num - kOldSeqNumThreshold;
  packet_buffer_.ClearTo(old_seq_num);
  reference_finder_.ClearTo(old_seq_num);
}
|
||||
|
||||
// Walks the packets returned by the packet buffer (in order) and stitches
// each [first..last] packet run into an RtpFrameObject whose bitstream is
// assembled by the depacketizer. Runs whose bitstream fails to assemble are
// dropped silently.
RtpVideoFrameAssembler::Impl::RtpFrameVector
RtpVideoFrameAssembler::Impl::AssembleFrames(
    video_coding::PacketBuffer::InsertResult insert_result) {
  video_coding::PacketBuffer::Packet* first_packet = nullptr;
  std::vector<rtc::ArrayView<const uint8_t>> payloads;
  RtpFrameVector result;

  for (auto& packet : insert_result.packets) {
    if (packet->is_first_packet_in_frame()) {
      first_packet = packet.get();
      payloads.clear();
    }
    payloads.emplace_back(packet->video_payload);

    if (packet->is_last_packet_in_frame()) {
      rtc::scoped_refptr<EncodedImageBuffer> bitstream =
          depacketizer_->AssembleFrame(payloads);

      if (!bitstream) {
        continue;
      }

      const video_coding::PacketBuffer::Packet& last_packet = *packet;
      // Timing/NACK/receive-time fields are zeroed: this assembler operates
      // outside the full receive pipeline and has no such metadata.
      result.push_back(std::make_unique<RtpFrameObject>(
          first_packet->seq_num,                            //
          last_packet.seq_num,                              //
          last_packet.marker_bit,                           //
          /*times_nacked=*/0,                               //
          /*first_packet_received_time=*/0,                 //
          /*last_packet_received_time=*/0,                  //
          first_packet->timestamp,                          //
          /*ntp_time_ms=*/0,                                //
          /*timing=*/VideoSendTiming(),                     //
          first_packet->payload_type,                       //
          first_packet->codec(),                            //
          last_packet.video_header.rotation,                //
          last_packet.video_header.content_type,            //
          first_packet->video_header,                       //
          last_packet.video_header.color_space,             //
          /*packet_infos=*/RtpPacketInfos(),                //
          std::move(bitstream)));
    }
  }

  return result;
}
|
||||
|
||||
// Runs each assembled frame through the reference finder; a single input
// frame may unblock several previously buffered frames. Each completed frame
// is wrapped together with its RTP sequence-number range.
RtpVideoFrameAssembler::FrameVector
RtpVideoFrameAssembler::Impl::FindReferences(RtpFrameVector frames) {
  FrameVector res;
  for (auto& frame : frames) {
    auto complete_frames = reference_finder_.ManageFrame(std::move(frame));
    for (std::unique_ptr<RtpFrameObject>& complete_frame : complete_frames) {
      uint16_t rtp_seq_num_start = complete_frame->first_seq_num();
      uint16_t rtp_seq_num_end = complete_frame->last_seq_num();
      res.emplace_back(rtp_seq_num_start, rtp_seq_num_end,
                       std::move(complete_frame));
    }
  }
  return res;
}
|
||||
|
||||
// Handles a padding-only packet: both the packet buffer and the reference
// finder must learn about the consumed sequence number, and either may
// release frames that were waiting on it.
RtpVideoFrameAssembler::FrameVector
RtpVideoFrameAssembler::Impl::UpdateWithPadding(uint16_t seq_num) {
  auto res =
      FindReferences(AssembleFrames(packet_buffer_.InsertPadding(seq_num)));
  auto ref_finder_update = reference_finder_.PaddingReceived(seq_num);

  for (std::unique_ptr<RtpFrameObject>& complete_frame : ref_finder_update) {
    uint16_t rtp_seq_num_start = complete_frame->first_seq_num();
    uint16_t rtp_seq_num_end = complete_frame->last_seq_num();
    res.emplace_back(rtp_seq_num_start, rtp_seq_num_end,
                     std::move(complete_frame));
  }

  return res;
}
|
||||
|
||||
// Parses the Dependency Descriptor RTP header extension into `video_header`.
// Returns false when the descriptor is malformed, references an unknown
// template structure, or attaches a structure on a non-first packet.
// Side effect: on a key frame, adopts the attached FrameDependencyStructure,
// which is needed to parse every later descriptor until the next key frame.
bool RtpVideoFrameAssembler::Impl::ParseDependenciesDescriptorExtension(
    const RtpPacketReceived& rtp_packet,
    RTPVideoHeader& video_header) {
  webrtc::DependencyDescriptor dependency_descriptor;

  if (!rtp_packet.GetExtension<RtpDependencyDescriptorExtension>(
          video_structure_.get(), &dependency_descriptor)) {
    // Descriptor is either malformed, or the template referenced is not in
    // the `video_structure_` currently being held.
    // TODO(bugs.webrtc.org/10342): Improve packet reordering behavior.
    RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
                        << " Failed to parse dependency descriptor.";
    return false;
  }

  if (dependency_descriptor.attached_structure != nullptr &&
      !dependency_descriptor.first_packet_in_frame) {
    RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
                        << "Invalid dependency descriptor: structure "
                           "attached to non first packet of a frame.";
    return false;
  }

  video_header.is_first_packet_in_frame =
      dependency_descriptor.first_packet_in_frame;
  video_header.is_last_packet_in_frame =
      dependency_descriptor.last_packet_in_frame;

  // Unwrap the 16-bit wire frame number into a monotonic 64-bit ID, then
  // translate the descriptor's relative diffs into absolute frame IDs.
  int64_t frame_id =
      frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number);
  auto& generic_descriptor_info = video_header.generic.emplace();
  generic_descriptor_info.frame_id = frame_id;
  generic_descriptor_info.spatial_index =
      dependency_descriptor.frame_dependencies.spatial_id;
  generic_descriptor_info.temporal_index =
      dependency_descriptor.frame_dependencies.temporal_id;

  for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) {
    generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
  }
  for (int cdiff : dependency_descriptor.frame_dependencies.chain_diffs) {
    generic_descriptor_info.chain_diffs.push_back(frame_id - cdiff);
  }
  generic_descriptor_info.decode_target_indications =
      dependency_descriptor.frame_dependencies.decode_target_indications;
  if (dependency_descriptor.resolution) {
    video_header.width = dependency_descriptor.resolution->Width();
    video_header.height = dependency_descriptor.resolution->Height();
  }
  if (dependency_descriptor.active_decode_targets_bitmask.has_value()) {
    generic_descriptor_info.active_decode_targets =
        *dependency_descriptor.active_decode_targets_bitmask;
  }

  // FrameDependencyStructure is sent in the dependency descriptor of the first
  // packet of a key frame and is required to parse all subsequent packets until
  // the next key frame.
  if (dependency_descriptor.attached_structure) {
    RTC_DCHECK(dependency_descriptor.first_packet_in_frame);
    // Reject a structure older than the one already adopted (reordered key
    // frame). Note: nullopt compares less than any frame_id, so the very
    // first structure is always accepted.
    if (video_structure_frame_id_ > frame_id) {
      RTC_LOG(LS_WARNING)
          << "Arrived key frame with id " << frame_id << " and structure id "
          << dependency_descriptor.attached_structure->structure_id
          << " is older than the latest received key frame with id "
          << *video_structure_frame_id_ << " and structure id "
          << video_structure_->structure_id;
      return false;
    }
    video_structure_ = std::move(dependency_descriptor.attached_structure);
    video_structure_frame_id_ = frame_id;
    video_header.frame_type = VideoFrameType::kVideoFrameKey;
  } else {
    video_header.frame_type = VideoFrameType::kVideoFrameDelta;
  }
  return true;
}
|
||||
|
||||
// Parses the (older, version-00) generic frame descriptor extension into
// `video_header`. Returns false when the extension cannot be read. Frame
// type is inferred from the dependency list: no dependencies => key frame.
bool RtpVideoFrameAssembler::Impl::ParseGenericDescriptorExtension(
    const RtpPacketReceived& rtp_packet,
    RTPVideoHeader& video_header) {
  RtpGenericFrameDescriptor generic_frame_descriptor;
  if (!rtp_packet.GetExtension<RtpGenericFrameDescriptorExtension00>(
          &generic_frame_descriptor)) {
    return false;
  }

  video_header.is_first_packet_in_frame =
      generic_frame_descriptor.FirstPacketInSubFrame();
  video_header.is_last_packet_in_frame =
      generic_frame_descriptor.LastPacketInSubFrame();

  // Frame-level fields are only present on the first packet of a (sub)frame.
  if (generic_frame_descriptor.FirstPacketInSubFrame()) {
    video_header.frame_type =
        generic_frame_descriptor.FrameDependenciesDiffs().empty()
            ? VideoFrameType::kVideoFrameKey
            : VideoFrameType::kVideoFrameDelta;

    auto& generic_descriptor_info = video_header.generic.emplace();
    int64_t frame_id =
        frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId());
    generic_descriptor_info.frame_id = frame_id;
    generic_descriptor_info.spatial_index =
        generic_frame_descriptor.SpatialLayer();
    generic_descriptor_info.temporal_index =
        generic_frame_descriptor.TemporalLayer();
    for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) {
      generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
    }
  }
  video_header.width = generic_frame_descriptor.Width();
  video_header.height = generic_frame_descriptor.Height();
  return true;
}
|
||||
|
||||
// Public facade: constructs the pimpl with a depacketizer matching the
// negotiated payload format and forwards all calls to it.
RtpVideoFrameAssembler::RtpVideoFrameAssembler(PayloadFormat payload_format)
    : impl_(std::make_unique<Impl>(CreateDepacketizer(payload_format))) {}

RtpVideoFrameAssembler::~RtpVideoFrameAssembler() = default;

RtpVideoFrameAssembler::FrameVector RtpVideoFrameAssembler::InsertPacket(
    const RtpPacketReceived& packet) {
  return impl_->InsertPacket(packet);
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,76 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_RTP_VIDEO_FRAME_ASSEMBLER_H_
|
||||
#define API_VIDEO_RTP_VIDEO_FRAME_ASSEMBLER_H_
|
||||
|
||||
#include <cstdint>
|
||||
#include <memory>
|
||||
#include <utility>
|
||||
|
||||
#include "absl/container/inlined_vector.h"
|
||||
#include "api/video/encoded_frame.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
|
||||
|
||||
namespace webrtc {
|
||||
// The RtpVideoFrameAssembler takes RtpPacketReceived and assembles them into
|
||||
// complete frames. A frame is considered complete when all packets of the frame
|
||||
// has been received, the bitstream data has successfully extracted, an ID has
|
||||
// been assigned, and all dependencies are known. Frame IDs are strictly
|
||||
// monotonic in decode order, dependencies are expressed as frame IDs.
|
||||
class RtpVideoFrameAssembler {
 public:
  // The RtpVideoFrameAssembler should return "RTP frames", but for now there
  // is no good class for this purpose. For now return an EncodedFrame bundled
  // with some minimal RTP information.
  class AssembledFrame {
   public:
    AssembledFrame(uint16_t rtp_seq_num_start,
                   uint16_t rtp_seq_num_end,
                   std::unique_ptr<EncodedFrame> frame)
        : rtp_seq_num_start_(rtp_seq_num_start),
          rtp_seq_num_end_(rtp_seq_num_end),
          frame_(std::move(frame)) {}

    // First and last RTP sequence numbers of the packets that made up the
    // frame (inclusive range, may wrap).
    uint16_t RtpSeqNumStart() const { return rtp_seq_num_start_; }
    uint16_t RtpSeqNumEnd() const { return rtp_seq_num_end_; }
    // Transfers ownership of the frame; subsequent calls return nullptr.
    std::unique_ptr<EncodedFrame> ExtractFrame() { return std::move(frame_); }

   private:
    uint16_t rtp_seq_num_start_;
    uint16_t rtp_seq_num_end_;
    std::unique_ptr<EncodedFrame> frame_;
  };

  // FrameVector is just a vector-like type of AssembledFrame.
  // The vector type may change without notice.
  using FrameVector = absl::InlinedVector<AssembledFrame, 3>;
  // Codec/packetization of the incoming RTP payload. Note: kH265 is declared
  // but not yet implemented by the assembler.
  enum PayloadFormat { kRaw, kH264, kVp8, kVp9, kAv1, kGeneric, kH265 };

  explicit RtpVideoFrameAssembler(PayloadFormat payload_format);
  RtpVideoFrameAssembler(const RtpVideoFrameAssembler& other) = delete;
  RtpVideoFrameAssembler& operator=(const RtpVideoFrameAssembler& other) =
      delete;
  ~RtpVideoFrameAssembler();

  // Typically when a packet is inserted zero or one frame is completed. In the
  // case of RTP packets being inserted out of order then sometime multiple
  // frames could be completed from a single packet, hence the 'FrameVector'
  // return type.
  FrameVector InsertPacket(const RtpPacketReceived& packet);

 private:
  class Impl;
  std::unique_ptr<Impl> impl_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_RTP_VIDEO_FRAME_ASSEMBLER_H_
|
||||
|
|
@ -0,0 +1,586 @@
|
|||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "api/array_view.h"
|
||||
#include "api/video/rtp_video_frame_assembler.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_format.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
|
||||
#include "modules/rtp_rtcp/source/rtp_packetizer_av1_test_helper.h"
|
||||
#include "test/gmock.h"
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
|
||||
using ::testing::ElementsAreArray;
|
||||
using ::testing::Eq;
|
||||
using ::testing::IsEmpty;
|
||||
using ::testing::Matches;
|
||||
using ::testing::SizeIs;
|
||||
using ::testing::UnorderedElementsAre;
|
||||
using ::testing::UnorderedElementsAreArray;
|
||||
using PayloadFormat = RtpVideoFrameAssembler::PayloadFormat;
|
||||
|
||||
// Test helper: fluently builds a single RtpPacketReceived by packetizing a
// payload with the real RtpPacketizer for the chosen format, then re-parsing
// the serialized packet. Note: only the FIRST packet produced by the
// packetizer is used, so keep payloads small enough to fit one packet.
class PacketBuilder {
 public:
  explicit PacketBuilder(PayloadFormat format)
      : format_(format), packet_to_send_(&extension_manager_) {}

  PacketBuilder& WithSeqNum(uint16_t seq_num) {
    seq_num_ = seq_num;
    return *this;
  }

  PacketBuilder& WithPayload(rtc::ArrayView<const uint8_t> payload) {
    payload_.assign(payload.begin(), payload.end());
    return *this;
  }

  PacketBuilder& WithVideoHeader(const RTPVideoHeader& video_header) {
    video_header_ = video_header;
    return *this;
  }

  // Registers extension T under `id` and sets its value on the packet.
  template <typename T, typename... Args>
  PacketBuilder& WithExtension(int id, const Args&... args) {
    extension_manager_.Register<T>(id);
    packet_to_send_.IdentifyExtensions(extension_manager_);
    packet_to_send_.SetExtension<T>(std::forward<const Args>(args)...);
    return *this;
  }

  // Packetizes the payload, stamps the sequence number, and round-trips the
  // bytes through RtpPacketReceived::Parse so the result looks like a packet
  // that arrived off the wire.
  RtpPacketReceived Build() {
    auto packetizer =
        RtpPacketizer::Create(GetVideoCodecType(), payload_, {}, video_header_);
    packetizer->NextPacket(&packet_to_send_);
    packet_to_send_.SetSequenceNumber(seq_num_);

    RtpPacketReceived received(&extension_manager_);
    received.Parse(packet_to_send_.Buffer());
    return received;
  }

 private:
  // Maps the assembler's PayloadFormat to the codec type the packetizer
  // expects; kRaw maps to "no codec" (nullopt).
  absl::optional<VideoCodecType> GetVideoCodecType() {
    switch (format_) {
      case PayloadFormat::kRaw: {
        return absl::nullopt;
      }
      case PayloadFormat::kH264: {
        return kVideoCodecH264;
      }
      case PayloadFormat::kVp8: {
        return kVideoCodecVP8;
      }
      case PayloadFormat::kVp9: {
        return kVideoCodecVP9;
      }
      case PayloadFormat::kAv1: {
        return kVideoCodecAV1;
      }
      case PayloadFormat::kH265: {
        return kVideoCodecH265;
      }
      case PayloadFormat::kGeneric: {
        return kVideoCodecGeneric;
      }
    }
    RTC_DCHECK_NOTREACHED();
    return absl::nullopt;
  }

  const RtpVideoFrameAssembler::PayloadFormat format_;
  uint16_t seq_num_ = 0;
  std::vector<uint8_t> payload_;
  RTPVideoHeader video_header_;
  RtpPacketReceived::ExtensionManager extension_manager_;
  RtpPacketToSend packet_to_send_;
};
|
||||
|
||||
// Builds a payload-free padding packet (224 padding bytes) with the given
// sequence number, for exercising the assembler's padding path.
RtpPacketReceived PaddingPacket(uint16_t seq_num) {
  RtpPacketReceived padding_packet;
  padding_packet.SetSequenceNumber(seq_num);
  padding_packet.SetPadding(224);
  return padding_packet;
}
|
||||
|
||||
// Moves every frame in `from` onto the end of `to`.
void AppendFrames(RtpVideoFrameAssembler::FrameVector from,
                  RtpVideoFrameAssembler::FrameVector& to) {
  for (auto& frame : from) {
    to.push_back(std::move(frame));
  }
}
|
||||
|
||||
// Exposes the reference list of `frame` as an ArrayView, for use in matchers.
rtc::ArrayView<int64_t> References(const std::unique_ptr<EncodedFrame>& frame) {
  return rtc::MakeArrayView(frame->references, frame->num_references);
}
|
||||
|
||||
// Exposes the encoded payload bytes of `frame` as an ArrayView.
rtc::ArrayView<uint8_t> Payload(const std::unique_ptr<EncodedFrame>& frame) {
  return rtc::ArrayView<uint8_t>(*frame->GetEncodedData());
}
|
||||
|
||||
// Two VP8 frames (key + delta) inserted in order assemble into two frames
// with the expected ids, references and payloads.
TEST(RtpVideoFrameAssembler, Vp8Packetization) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kVp8);

  // When sending VP8 over RTP parts of the payload are actually inspected at
  // the RTP level. It just so happens that the initial 'V' sets the keyframe
  // bit (0x01) to the correct value.
  uint8_t kKeyframePayload[] = "Vp8Keyframe";
  ASSERT_EQ(kKeyframePayload[0] & 0x01, 0);

  uint8_t kDeltaframePayload[] = "SomeFrame";
  ASSERT_EQ(kDeltaframePayload[0] & 0x01, 1);

  RtpVideoFrameAssembler::FrameVector frames;

  RTPVideoHeader video_header;
  auto& vp8_header =
      video_header.video_type_header.emplace<RTPVideoHeaderVP8>();

  vp8_header.pictureId = 10;
  vp8_header.tl0PicIdx = 0;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp8)
                                          .WithPayload(kKeyframePayload)
                                          .WithVideoHeader(video_header)
                                          .Build()),
               frames);

  vp8_header.pictureId = 11;
  vp8_header.tl0PicIdx = 1;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp8)
                                          .WithPayload(kDeltaframePayload)
                                          .WithVideoHeader(video_header)
                                          .Build()),
               frames);

  ASSERT_THAT(frames, SizeIs(2));

  auto keyframe = frames[0].ExtractFrame();
  EXPECT_THAT(keyframe->Id(), Eq(10));
  EXPECT_THAT(References(keyframe), IsEmpty());
  EXPECT_THAT(Payload(keyframe), ElementsAreArray(kKeyframePayload));

  auto deltaframe = frames[1].ExtractFrame();
  EXPECT_THAT(deltaframe->Id(), Eq(11));
  EXPECT_THAT(References(deltaframe), UnorderedElementsAre(10));
  EXPECT_THAT(Payload(deltaframe), ElementsAreArray(kDeltaframePayload));
}

// Same scenario for VP9: an inter-predicted frame references the preceding
// keyframe via picture ids.
TEST(RtpVideoFrameAssembler, Vp9Packetization) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kVp9);
  RtpVideoFrameAssembler::FrameVector frames;

  uint8_t kPayload[] = "SomePayload";

  RTPVideoHeader video_header;
  auto& vp9_header =
      video_header.video_type_header.emplace<RTPVideoHeaderVP9>();
  vp9_header.InitRTPVideoHeaderVP9();

  vp9_header.picture_id = 10;
  vp9_header.tl0_pic_idx = 0;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp9)
                                          .WithPayload(kPayload)
                                          .WithVideoHeader(video_header)
                                          .Build()),
               frames);

  vp9_header.picture_id = 11;
  vp9_header.tl0_pic_idx = 1;
  vp9_header.inter_pic_predicted = true;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kVp9)
                                          .WithPayload(kPayload)
                                          .WithVideoHeader(video_header)
                                          .Build()),
               frames);

  ASSERT_THAT(frames, SizeIs(2));

  auto keyframe = frames[0].ExtractFrame();
  EXPECT_THAT(keyframe->Id(), Eq(10));
  EXPECT_THAT(Payload(keyframe), ElementsAreArray(kPayload));
  EXPECT_THAT(References(keyframe), IsEmpty());

  auto deltaframe = frames[1].ExtractFrame();
  EXPECT_THAT(deltaframe->Id(), Eq(11));
  EXPECT_THAT(Payload(deltaframe), ElementsAreArray(kPayload));
  EXPECT_THAT(References(deltaframe), UnorderedElementsAre(10));
}

// AV1 frames are identified by RTP sequence number; the delta frame implicitly
// references the keyframe that precedes it.
TEST(RtpVideoFrameAssembler, Av1Packetization) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kAv1);
  RtpVideoFrameAssembler::FrameVector frames;

  auto kKeyframePayload =
      BuildAv1Frame({Av1Obu(kAv1ObuTypeSequenceHeader).WithPayload({1, 2, 3}),
                     Av1Obu(kAv1ObuTypeFrame).WithPayload({4, 5, 6})});

  auto kDeltaframePayload =
      BuildAv1Frame({Av1Obu(kAv1ObuTypeFrame).WithPayload({7, 8, 9})});

  RTPVideoHeader video_header;

  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kAv1)
                                          .WithPayload(kKeyframePayload)
                                          .WithVideoHeader(video_header)
                                          .WithSeqNum(20)
                                          .Build()),
               frames);

  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kAv1)
                                          .WithPayload(kDeltaframePayload)
                                          .WithSeqNum(21)
                                          .Build()),
               frames);

  ASSERT_THAT(frames, SizeIs(2));

  auto keyframe = frames[0].ExtractFrame();
  EXPECT_THAT(keyframe->Id(), Eq(20));
  EXPECT_THAT(Payload(keyframe), ElementsAreArray(kKeyframePayload));
  EXPECT_THAT(References(keyframe), IsEmpty());

  auto deltaframe = frames[1].ExtractFrame();
  EXPECT_THAT(deltaframe->Id(), Eq(21));
  EXPECT_THAT(Payload(deltaframe), ElementsAreArray(kDeltaframePayload));
  EXPECT_THAT(References(deltaframe), UnorderedElementsAre(20));
}
|
||||
|
||||
// Raw payloads carrying a DependencyDescriptor header extension: frame ids
// and references come from the descriptor, not the payload.
TEST(RtpVideoFrameAssembler, RawPacketizationDependencyDescriptorExtension) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kRaw);
  RtpVideoFrameAssembler::FrameVector frames;
  uint8_t kPayload[] = "SomePayload";

  // One decode target protected by one chain; template 0 is the keyframe
  // template, template 1 references the frame 10 earlier.
  FrameDependencyStructure dependency_structure;
  dependency_structure.num_decode_targets = 1;
  dependency_structure.num_chains = 1;
  dependency_structure.decode_target_protected_by_chain.push_back(0);
  dependency_structure.templates.push_back(
      FrameDependencyTemplate().S(0).T(0).Dtis("S").ChainDiffs({0}));
  dependency_structure.templates.push_back(
      FrameDependencyTemplate().S(0).T(0).Dtis("S").ChainDiffs({10}).FrameDiffs(
          {10}));

  DependencyDescriptor dependency_descriptor;

  // First frame carries the structure itself.
  dependency_descriptor.frame_number = 10;
  dependency_descriptor.frame_dependencies = dependency_structure.templates[0];
  dependency_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(dependency_structure);
  AppendFrames(assembler.InsertPacket(
                   PacketBuilder(PayloadFormat::kRaw)
                       .WithPayload(kPayload)
                       .WithExtension<RtpDependencyDescriptorExtension>(
                           1, dependency_structure, dependency_descriptor)
                       .Build()),
               frames);

  // Second frame relies on the previously delivered structure.
  dependency_descriptor.frame_number = 20;
  dependency_descriptor.frame_dependencies = dependency_structure.templates[1];
  dependency_descriptor.attached_structure.reset();
  AppendFrames(assembler.InsertPacket(
                   PacketBuilder(PayloadFormat::kRaw)
                       .WithPayload(kPayload)
                       .WithExtension<RtpDependencyDescriptorExtension>(
                           1, dependency_structure, dependency_descriptor)
                       .Build()),
               frames);

  ASSERT_THAT(frames, SizeIs(2));

  auto first_frame = frames[0].ExtractFrame();
  EXPECT_THAT(first_frame->Id(), Eq(10));
  EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(first_frame), IsEmpty());

  auto second_frame = frames[1].ExtractFrame();
  EXPECT_THAT(second_frame->Id(), Eq(20));
  EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(second_frame), UnorderedElementsAre(10));
}

// Raw payloads carrying the generic frame descriptor (version 00) extension.
TEST(RtpVideoFrameAssembler, RawPacketizationGenericDescriptor00Extension) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kRaw);
  RtpVideoFrameAssembler::FrameVector frames;
  uint8_t kPayload[] = "SomePayload";

  RtpGenericFrameDescriptor generic;

  generic.SetFirstPacketInSubFrame(true);
  generic.SetLastPacketInSubFrame(true);
  generic.SetFrameId(100);
  AppendFrames(
      assembler.InsertPacket(
          PacketBuilder(PayloadFormat::kRaw)
              .WithPayload(kPayload)
              .WithExtension<RtpGenericFrameDescriptorExtension00>(1, generic)
              .Build()),
      frames);

  generic.SetFrameId(102);
  generic.AddFrameDependencyDiff(2);
  AppendFrames(
      assembler.InsertPacket(
          PacketBuilder(PayloadFormat::kRaw)
              .WithPayload(kPayload)
              .WithExtension<RtpGenericFrameDescriptorExtension00>(1, generic)
              .Build()),
      frames);

  ASSERT_THAT(frames, SizeIs(2));

  auto first_frame = frames[0].ExtractFrame();
  EXPECT_THAT(first_frame->Id(), Eq(100));
  EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(first_frame), IsEmpty());

  auto second_frame = frames[1].ExtractFrame();
  EXPECT_THAT(second_frame->Id(), Eq(102));
  EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(second_frame), UnorderedElementsAre(100));
}

// The generic payload descriptor derives frame ids from sequence numbers.
TEST(RtpVideoFrameAssembler, RawPacketizationGenericPayloadDescriptor) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
  RtpVideoFrameAssembler::FrameVector frames;
  uint8_t kPayload[] = "SomePayload";

  RTPVideoHeader video_header;

  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                          .WithPayload(kPayload)
                                          .WithVideoHeader(video_header)
                                          .WithSeqNum(123)
                                          .Build()),
               frames);

  video_header.frame_type = VideoFrameType::kVideoFrameDelta;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                          .WithPayload(kPayload)
                                          .WithVideoHeader(video_header)
                                          .WithSeqNum(124)
                                          .Build()),
               frames);

  ASSERT_THAT(frames, SizeIs(2));

  auto first_frame = frames[0].ExtractFrame();
  EXPECT_THAT(first_frame->Id(), Eq(123));
  EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(first_frame), IsEmpty());

  auto second_frame = frames[1].ExtractFrame();
  EXPECT_THAT(second_frame->Id(), Eq(124));
  EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(second_frame), UnorderedElementsAre(123));
}
|
||||
|
||||
// A padding packet filling a sequence-number gap lets the frame after the gap
// be assembled.
TEST(RtpVideoFrameAssembler, Padding) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
  RtpVideoFrameAssembler::FrameVector frames;
  uint8_t kPayload[] = "SomePayload";

  RTPVideoHeader video_header;

  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                          .WithPayload(kPayload)
                                          .WithVideoHeader(video_header)
                                          .WithSeqNum(123)
                                          .Build()),
               frames);

  // Seq num 124 is missing, so this delta frame cannot be assembled yet.
  video_header.frame_type = VideoFrameType::kVideoFrameDelta;
  AppendFrames(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                          .WithPayload(kPayload)
                                          .WithVideoHeader(video_header)
                                          .WithSeqNum(125)
                                          .Build()),
               frames);

  ASSERT_THAT(frames, SizeIs(1));
  auto first_frame = frames[0].ExtractFrame();
  EXPECT_THAT(first_frame->Id(), Eq(123));
  EXPECT_THAT(Payload(first_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(first_frame), IsEmpty());

  // Padding closes the gap and releases the pending frame.
  AppendFrames(assembler.InsertPacket(PaddingPacket(/*seq_num=*/124)), frames);

  ASSERT_THAT(frames, SizeIs(2));
  auto second_frame = frames[1].ExtractFrame();
  EXPECT_THAT(second_frame->Id(), Eq(125));
  EXPECT_THAT(Payload(second_frame), ElementsAreArray(kPayload));
  EXPECT_THAT(References(second_frame), UnorderedElementsAre(123));
}

// A large sequence-number jump flushes old packets; re-inserting a stale
// sequence number yields nothing, while the next fresh one assembles.
TEST(RtpVideoFrameAssembler, ClearOldPackets) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);

  // If we don't have a payload the packet will be counted as a padding packet.
  uint8_t kPayload[] = "DontCare";

  RTPVideoHeader video_header;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(0)
                                         .Build()),
              SizeIs(1));

  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(2000)
                                         .Build()),
              SizeIs(1));

  // Seq num 0 is now too old to be accepted again.
  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(0)
                                         .Build()),
              SizeIs(0));

  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(1)
                                         .Build()),
              SizeIs(1));
}

// Same flush behavior when the far-ahead packet is padding rather than media.
TEST(RtpVideoFrameAssembler, ClearOldPacketsWithPadding) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
  uint8_t kPayload[] = "DontCare";

  RTPVideoHeader video_header;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(0)
                                         .Build()),
              SizeIs(1));

  EXPECT_THAT(assembler.InsertPacket(PaddingPacket(/*seq_num=*/2000)),
              SizeIs(0));

  // Seq num 0 was flushed by the padding packet above.
  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(0)
                                         .Build()),
              SizeIs(0));

  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(1)
                                         .Build()),
              SizeIs(1));
}
|
||||
|
||||
// A frame split across two packets reports the sequence numbers of its first
// and last packet.
TEST(RtpVideoFrameAssembler, SeqNumStartAndSeqNumEndSet) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
  RtpVideoFrameAssembler::FrameVector frames;
  uint8_t kPayload[] =
      "Some payload that will get split into two when packetized.";

  RTPVideoHeader video_header;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  RtpPacketizer::PayloadSizeLimits limits;
  // Force the payload to be split into exactly two packets.
  limits.max_payload_len = sizeof(kPayload) - 1;

  auto packetizer =
      RtpPacketizer::Create(kVideoCodecGeneric, kPayload, limits, video_header);
  ASSERT_THAT(packetizer->NumPackets(), Eq(2U));

  RtpPacketReceived::ExtensionManager extension_manager;
  {
    RtpPacketToSend send_packet(&extension_manager);
    packetizer->NextPacket(&send_packet);
    send_packet.SetSequenceNumber(123);
    RtpPacketReceived received_packet(&extension_manager);
    received_packet.Parse(send_packet.Buffer());
    assembler.InsertPacket(received_packet);
  }

  {
    RtpPacketToSend send_packet(&extension_manager);
    packetizer->NextPacket(&send_packet);
    send_packet.SetSequenceNumber(124);
    RtpPacketReceived received_packet(&extension_manager);
    received_packet.Parse(send_packet.Buffer());
    AppendFrames(assembler.InsertPacket(received_packet), frames);
  }

  ASSERT_THAT(frames, SizeIs(1));
  EXPECT_THAT(frames[0].RtpSeqNumStart(), Eq(123));
  EXPECT_THAT(frames[0].RtpSeqNumEnd(), Eq(124));
}

// The sequence-number range excludes the padding packet that completed the
// gap in front of the frame.
TEST(RtpVideoFrameAssembler, SeqNumStartAndSeqNumEndSetWhenPaddingReceived) {
  RtpVideoFrameAssembler assembler(RtpVideoFrameAssembler::kGeneric);
  RtpVideoFrameAssembler::FrameVector frames;
  uint8_t kPayload[] =
      "Some payload that will get split into two when packetized.";

  RTPVideoHeader video_header;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;

  EXPECT_THAT(assembler.InsertPacket(PacketBuilder(PayloadFormat::kGeneric)
                                         .WithPayload(kPayload)
                                         .WithVideoHeader(video_header)
                                         .WithSeqNum(121)
                                         .Build()),
              SizeIs(1));

  video_header.frame_type = VideoFrameType::kVideoFrameDelta;
  RtpPacketReceived::ExtensionManager extension_manager;
  RtpPacketizer::PayloadSizeLimits limits;
  limits.max_payload_len = sizeof(kPayload) - 1;

  auto packetizer =
      RtpPacketizer::Create(kVideoCodecGeneric, kPayload, limits, video_header);
  ASSERT_THAT(packetizer->NumPackets(), Eq(2U));

  {
    RtpPacketToSend send_packet(&extension_manager);
    packetizer->NextPacket(&send_packet);
    send_packet.SetSequenceNumber(123);
    RtpPacketReceived received_packet(&extension_manager);
    received_packet.Parse(send_packet.Buffer());
    assembler.InsertPacket(received_packet);
  }

  {
    RtpPacketToSend send_packet(&extension_manager);
    packetizer->NextPacket(&send_packet);
    send_packet.SetSequenceNumber(124);
    RtpPacketReceived received_packet(&extension_manager);
    received_packet.Parse(send_packet.Buffer());
    assembler.InsertPacket(received_packet);
  }

  // The frame spanning 123-124 is only released once the 122 gap is filled.
  AppendFrames(assembler.InsertPacket(PaddingPacket(/*seq_num=*/122)), frames);

  ASSERT_THAT(frames, SizeIs(1));
  EXPECT_THAT(frames[0].RtpSeqNumStart(), Eq(123));
  EXPECT_THAT(frames[0].RtpSeqNumEnd(), Eq(124));
}
|
||||
|
||||
} // namespace
|
||||
} // namespace webrtc
|
||||
56
TMessagesProj/jni/voip/webrtc/api/video/test/BUILD.gn
Normal file
56
TMessagesProj/jni/voip/webrtc/api/video/test/BUILD.gn
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

import("../../../webrtc.gni")

# Unit tests for the api/video buffer and metadata types.
rtc_library("rtc_api_video_unittests") {
  testonly = true
  sources = [
    "color_space_unittest.cc",
    "i210_buffer_unittest.cc",
    "i410_buffer_unittest.cc",
    "i422_buffer_unittest.cc",
    "i444_buffer_unittest.cc",
    "nv12_buffer_unittest.cc",
    "video_adaptation_counters_unittest.cc",
    "video_bitrate_allocation_unittest.cc",
  ]
  deps = [
    "..:video_adaptation",
    "..:video_bitrate_allocation",
    "..:video_frame",
    "..:video_frame_i010",
    "..:video_rtp_headers",
    "../../../test:frame_utils",
    "../../../test:test_support",
  ]
  absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}

# gMock mock for RecordableEncodedFrame, usable by any test target.
rtc_source_set("mock_recordable_encoded_frame") {
  testonly = true
  visibility = [ "*" ]
  sources = [ "mock_recordable_encoded_frame.h" ]

  deps = [
    "..:recordable_encoded_frame",
    "../../../test:test_support",
  ]
}

# gMock matchers for video frames.
rtc_source_set("video_frame_matchers") {
  testonly = true
  visibility = [ "*" ]
  sources = [ "video_frame_matchers.h" ]

  deps = [
    "..:video_frame",
    "../..:rtp_packet_info",
    "../../../test:test_support",
  ]
}
|
||||
|
|
@ -0,0 +1,83 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/color_space.h"
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
// Valid primary ids are accepted; reserved/out-of-range values are rejected.
TEST(ColorSpace, TestSettingPrimariesFromUint8) {
  ColorSpace color_space;
  EXPECT_TRUE(color_space.set_primaries_from_uint8(
      static_cast<uint8_t>(ColorSpace::PrimaryID::kBT470BG)));
  EXPECT_EQ(ColorSpace::PrimaryID::kBT470BG, color_space.primaries());
  EXPECT_FALSE(color_space.set_primaries_from_uint8(3));
  EXPECT_FALSE(color_space.set_primaries_from_uint8(23));
  EXPECT_FALSE(color_space.set_primaries_from_uint8(64));
}

// Valid transfer ids are accepted; reserved/out-of-range values are rejected.
TEST(ColorSpace, TestSettingTransferFromUint8) {
  ColorSpace color_space;
  EXPECT_TRUE(color_space.set_transfer_from_uint8(
      static_cast<uint8_t>(ColorSpace::TransferID::kBT2020_10)));
  EXPECT_EQ(ColorSpace::TransferID::kBT2020_10, color_space.transfer());
  EXPECT_FALSE(color_space.set_transfer_from_uint8(3));
  EXPECT_FALSE(color_space.set_transfer_from_uint8(19));
  EXPECT_FALSE(color_space.set_transfer_from_uint8(128));
}

// Valid matrix ids are accepted; reserved/out-of-range values are rejected.
TEST(ColorSpace, TestSettingMatrixFromUint8) {
  ColorSpace color_space;
  EXPECT_TRUE(color_space.set_matrix_from_uint8(
      static_cast<uint8_t>(ColorSpace::MatrixID::kCDNCLS)));
  EXPECT_EQ(ColorSpace::MatrixID::kCDNCLS, color_space.matrix());
  EXPECT_FALSE(color_space.set_matrix_from_uint8(3));
  EXPECT_FALSE(color_space.set_matrix_from_uint8(15));
  EXPECT_FALSE(color_space.set_matrix_from_uint8(255));
}

// Valid range ids are accepted; out-of-range values are rejected.
TEST(ColorSpace, TestSettingRangeFromUint8) {
  ColorSpace color_space;
  EXPECT_TRUE(color_space.set_range_from_uint8(
      static_cast<uint8_t>(ColorSpace::RangeID::kFull)));
  EXPECT_EQ(ColorSpace::RangeID::kFull, color_space.range());
  EXPECT_FALSE(color_space.set_range_from_uint8(4));
}

// Horizontal chroma siting accepts known values and rejects others.
TEST(ColorSpace, TestSettingChromaSitingHorizontalFromUint8) {
  ColorSpace color_space;
  EXPECT_TRUE(color_space.set_chroma_siting_horizontal_from_uint8(
      static_cast<uint8_t>(ColorSpace::ChromaSiting::kCollocated)));
  EXPECT_EQ(ColorSpace::ChromaSiting::kCollocated,
            color_space.chroma_siting_horizontal());
  EXPECT_FALSE(color_space.set_chroma_siting_horizontal_from_uint8(3));
}

// Vertical chroma siting accepts known values and rejects others.
TEST(ColorSpace, TestSettingChromaSitingVerticalFromUint8) {
  ColorSpace color_space;
  EXPECT_TRUE(color_space.set_chroma_siting_vertical_from_uint8(
      static_cast<uint8_t>(ColorSpace::ChromaSiting::kHalf)));
  EXPECT_EQ(ColorSpace::ChromaSiting::kHalf,
            color_space.chroma_siting_vertical());
  EXPECT_FALSE(color_space.set_chroma_siting_vertical_from_uint8(3));
}

// AsString() renders all four ids in a fixed, human-readable format.
TEST(ColorSpace, TestAsStringFunction) {
  ColorSpace color_space(
      ColorSpace::PrimaryID::kBT709, ColorSpace::TransferID::kBT709,
      ColorSpace::MatrixID::kBT709, ColorSpace::RangeID::kLimited);
  EXPECT_EQ(
      color_space.AsString(),
      "{primaries:kBT709, transfer:kBT709, matrix:kBT709, range:kLimited}");
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,126 @@
|
|||
|
||||
/*
|
||||
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/i210_buffer.h"
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "test/frame_utils.h"
|
||||
#include "test/gmock.h"
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
|
||||
int GetY(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
|
||||
return buf->DataY()[row * buf->StrideY() + col];
|
||||
}
|
||||
|
||||
int GetU(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
|
||||
return buf->DataU()[row * buf->StrideU() + col];
|
||||
}
|
||||
|
||||
int GetV(rtc::scoped_refptr<I210BufferInterface> buf, int col, int row) {
|
||||
return buf->DataV()[row * buf->StrideV() + col];
|
||||
}
|
||||
|
||||
void FillI210Buffer(rtc::scoped_refptr<I210Buffer> buf) {
|
||||
const uint16_t Y = 4;
|
||||
const uint16_t U = 8;
|
||||
const uint16_t V = 16;
|
||||
for (int row = 0; row < buf->height(); ++row) {
|
||||
for (int col = 0; col < buf->width(); ++col) {
|
||||
buf->MutableDataY()[row * buf->StrideY() + col] = Y;
|
||||
}
|
||||
}
|
||||
for (int row = 0; row < buf->ChromaHeight(); ++row) {
|
||||
for (int col = 0; col < buf->ChromaWidth(); ++col) {
|
||||
buf->MutableDataU()[row * buf->StrideU() + col] = U;
|
||||
buf->MutableDataV()[row * buf->StrideV() + col] = V;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// A freshly created I210 buffer reports 4:2:2 geometry: chroma is
// horizontally subsampled but has full height.
TEST(I210BufferTest, InitialData) {
  constexpr int stride = 3;
  constexpr int halfstride = (stride + 1) >> 1;
  constexpr int width = 3;
  constexpr int halfwidth = (width + 1) >> 1;
  constexpr int height = 3;

  rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
  EXPECT_EQ(width, i210_buffer->width());
  EXPECT_EQ(height, i210_buffer->height());
  EXPECT_EQ(stride, i210_buffer->StrideY());
  EXPECT_EQ(halfstride, i210_buffer->StrideU());
  EXPECT_EQ(halfstride, i210_buffer->StrideV());
  EXPECT_EQ(halfwidth, i210_buffer->ChromaWidth());
  EXPECT_EQ(height, i210_buffer->ChromaHeight());
}

// Samples written by FillI210Buffer read back unchanged.
TEST(I210BufferTest, ReadPixels) {
  constexpr int width = 3;
  constexpr int halfwidth = (width + 1) >> 1;
  constexpr int height = 3;

  rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
  // Y = 4, U = 8, V = 16.
  FillI210Buffer(i210_buffer);
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(4, GetY(i210_buffer, col, row));
    }
  }
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < halfwidth; col++) {
      EXPECT_EQ(8, GetU(i210_buffer, col, row));
      EXPECT_EQ(16, GetV(i210_buffer, col, row));
    }
  }
}

// Converting to I420 drops the two low bits of each 10-bit sample, so the
// result should match an 8-bit reference with values (1, 2, 4).
TEST(I210BufferTest, ToI420) {
  constexpr int width = 3;
  constexpr int halfwidth = (width + 1) >> 1;
  constexpr int height = 3;
  constexpr int size = width * height;
  constexpr int quartersize = (width + 1) / 2 * (height + 1) / 2;
  rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
  memset(reference->MutableDataY(), 1, size);
  memset(reference->MutableDataU(), 2, quartersize);
  memset(reference->MutableDataV(), 4, quartersize);

  rtc::scoped_refptr<I210Buffer> i210_buffer(I210Buffer::Create(width, height));
  // Y = 4, U = 8, V = 16.
  FillI210Buffer(i210_buffer);

  // Confirm YUV values are as expected.
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(4, GetY(i210_buffer, col, row));
    }
  }
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < halfwidth; col++) {
      EXPECT_EQ(8, GetU(i210_buffer, col, row));
      EXPECT_EQ(16, GetV(i210_buffer, col, row));
    }
  }

  rtc::scoped_refptr<I420BufferInterface> i420_buffer(i210_buffer->ToI420());
  EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
  EXPECT_EQ(height, i420_buffer->height());
  EXPECT_EQ(width, i420_buffer->width());
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,120 @@
|
|||
|
||||
/*
|
||||
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/i410_buffer.h"
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "test/frame_utils.h"
|
||||
#include "test/gmock.h"
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
constexpr uint16_t kYValue = 4;
|
||||
constexpr uint16_t kUValue = 8;
|
||||
constexpr uint16_t kVValue = 16;
|
||||
|
||||
int GetY(rtc::scoped_refptr<I410BufferInterface> buf, int col, int row) {
|
||||
return buf->DataY()[row * buf->StrideY() + col];
|
||||
}
|
||||
|
||||
int GetU(rtc::scoped_refptr<I410BufferInterface> buf, int col, int row) {
|
||||
return buf->DataU()[row * buf->StrideU() + col];
|
||||
}
|
||||
|
||||
int GetV(rtc::scoped_refptr<I410BufferInterface> buf, int col, int row) {
|
||||
return buf->DataV()[row * buf->StrideV() + col];
|
||||
}
|
||||
|
||||
void FillI410Buffer(rtc::scoped_refptr<I410Buffer> buf) {
|
||||
for (int row = 0; row < buf->height(); ++row) {
|
||||
for (int col = 0; col < buf->width(); ++col) {
|
||||
buf->MutableDataY()[row * buf->StrideY() + col] = kYValue;
|
||||
buf->MutableDataU()[row * buf->StrideU() + col] = kUValue;
|
||||
buf->MutableDataV()[row * buf->StrideV() + col] = kVValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// A freshly created I410 buffer reports 4:4:4 geometry: chroma planes have
// full width and height.
TEST(I410BufferTest, InitialData) {
  constexpr int stride = 3;
  constexpr int width = 3;
  constexpr int height = 3;

  rtc::scoped_refptr<I410Buffer> i410_buffer(I410Buffer::Create(width, height));
  EXPECT_EQ(width, i410_buffer->width());
  EXPECT_EQ(height, i410_buffer->height());
  EXPECT_EQ(stride, i410_buffer->StrideY());
  EXPECT_EQ(stride, i410_buffer->StrideU());
  EXPECT_EQ(stride, i410_buffer->StrideV());
  EXPECT_EQ(3, i410_buffer->ChromaWidth());
  EXPECT_EQ(3, i410_buffer->ChromaHeight());
}

// Samples written by FillI410Buffer read back unchanged.
TEST(I410BufferTest, ReadPixels) {
  constexpr int width = 3;
  constexpr int height = 3;

  rtc::scoped_refptr<I410Buffer> i410_buffer(I410Buffer::Create(width, height));
  FillI410Buffer(i410_buffer);
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(kYValue, GetY(i410_buffer, col, row));
      EXPECT_EQ(kUValue, GetU(i410_buffer, col, row));
      EXPECT_EQ(kVValue, GetV(i410_buffer, col, row));
    }
  }
}

// Converting to I420 subsamples chroma and narrows 10-bit samples to 8 bit.
TEST(I410BufferTest, ToI420) {
  // libyuv I410ToI420 only handles correctly even sizes and skips last row/col
  // if odd.
  constexpr int width = 4;
  constexpr int height = 4;
  constexpr int size_y = width * height;
  constexpr int size_u = (width + 1) / 2 * (height + 1) / 2;
  constexpr int size_v = (width + 1) / 2 * (height + 1) / 2;
  rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
  // I410 is 10-bit while I420 is 8 bit, so last 2 bits would be discarded.
  memset(reference->MutableDataY(), kYValue >> 2, size_y);
  memset(reference->MutableDataU(), kUValue >> 2, size_u);
  memset(reference->MutableDataV(), kVValue >> 2, size_v);

  rtc::scoped_refptr<I410Buffer> i410_buffer(I410Buffer::Create(width, height));
  FillI410Buffer(i410_buffer);

  // Confirm YUV values are as expected.
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(kYValue, GetY(i410_buffer, col, row));
      EXPECT_EQ(kUValue, GetU(i410_buffer, col, row));
      EXPECT_EQ(kVValue, GetV(i410_buffer, col, row));
    }
  }

  rtc::scoped_refptr<I420BufferInterface> i420_buffer(i410_buffer->ToI420());

  // Confirm YUV values are as expected.
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(1, i420_buffer->DataY()[row * i420_buffer->StrideY() + col]);
    }
  }

  EXPECT_EQ(height, i420_buffer->height());
  EXPECT_EQ(width, i420_buffer->width());
  EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,128 @@
|
|||
|
||||
/*
|
||||
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/i422_buffer.h"
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "test/frame_utils.h"
|
||||
#include "test/gmock.h"
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
// Reads the luma sample at (col, row) from `buf`.
int GetY(rtc::scoped_refptr<I422BufferInterface> buf, int col, int row) {
  const int offset = row * buf->StrideY() + col;
  return buf->DataY()[offset];
}
|
||||
|
||||
// Reads the U chroma sample at (col, row) from `buf`.
int GetU(rtc::scoped_refptr<I422BufferInterface> buf, int col, int row) {
  const int offset = row * buf->StrideU() + col;
  return buf->DataU()[offset];
}
|
||||
|
||||
// Reads the V chroma sample at (col, row) from `buf`.
int GetV(rtc::scoped_refptr<I422BufferInterface> buf, int col, int row) {
  const int offset = row * buf->StrideV() + col;
  return buf->DataV()[offset];
}
|
||||
|
||||
// Fills `buf` with constant test values: Y = 1, U = 2, V = 3. The chroma
// planes are iterated at their own (horizontally subsampled) dimensions.
void FillI422Buffer(rtc::scoped_refptr<I422Buffer> buf) {
  const uint8_t kLuma = 1;
  const uint8_t kChromaU = 2;
  const uint8_t kChromaV = 3;
  uint8_t* y_plane = buf->MutableDataY();
  for (int y = 0; y < buf->height(); ++y) {
    for (int x = 0; x < buf->width(); ++x) {
      y_plane[y * buf->StrideY() + x] = kLuma;
    }
  }
  uint8_t* u_plane = buf->MutableDataU();
  uint8_t* v_plane = buf->MutableDataV();
  for (int y = 0; y < buf->ChromaHeight(); ++y) {
    for (int x = 0; x < buf->ChromaWidth(); ++x) {
      u_plane[y * buf->StrideU() + x] = kChromaU;
      v_plane[y * buf->StrideV() + x] = kChromaV;
    }
  }
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// A freshly created I422 buffer reports the requested geometry; chroma is
// subsampled horizontally only (half width, full height).
TEST(I422BufferTest, InitialData) {
  constexpr int kStride = 3;
  constexpr int kHalfStride = (kStride + 1) >> 1;
  constexpr int kWidth = 3;
  constexpr int kHalfWidth = (kWidth + 1) >> 1;
  constexpr int kHeight = 3;

  rtc::scoped_refptr<I422Buffer> buffer = I422Buffer::Create(kWidth, kHeight);
  EXPECT_EQ(kWidth, buffer->width());
  EXPECT_EQ(kHeight, buffer->height());
  EXPECT_EQ(kStride, buffer->StrideY());
  EXPECT_EQ(kHalfStride, buffer->StrideU());
  EXPECT_EQ(kHalfStride, buffer->StrideV());
  EXPECT_EQ(kHalfWidth, buffer->ChromaWidth());
  EXPECT_EQ(kHeight, buffer->ChromaHeight());
}
|
||||
|
||||
// Values written by FillI422Buffer read back unchanged: the luma plane at
// full resolution, the chroma planes at half width.
TEST(I422BufferTest, ReadPixels) {
  constexpr int kWidth = 3;
  constexpr int kHalfWidth = (kWidth + 1) >> 1;
  constexpr int kHeight = 3;

  rtc::scoped_refptr<I422Buffer> buffer = I422Buffer::Create(kWidth, kHeight);
  // Y = 1, U = 2, V = 3.
  FillI422Buffer(buffer);
  for (int y = 0; y < kHeight; ++y) {
    for (int x = 0; x < kWidth; ++x) {
      EXPECT_EQ(1, GetY(buffer, x, y));
    }
  }
  for (int y = 0; y < kHeight; ++y) {
    for (int x = 0; x < kHalfWidth; ++x) {
      EXPECT_EQ(2, GetU(buffer, x, y));
      EXPECT_EQ(3, GetV(buffer, x, y));
    }
  }
}
|
||||
|
||||
// Converts a constant-filled I422 buffer to I420 and compares against a
// hand-built I420 reference. With uniform plane values the vertical chroma
// downsampling (4:2:2 -> 4:2:0) must not change any sample value.
TEST(I422BufferTest, ToI420) {
  constexpr int width = 3;
  constexpr int halfwidth = (width + 1) >> 1;
  constexpr int height = 3;
  constexpr int size = width * height;
  // I422 chroma planes: half width, full height.
  constexpr int halfsize = (width + 1) / 2 * height;
  // I420 chroma planes: half width, half height.
  constexpr int quartersize = (width + 1) / 2 * (height + 1) / 2;
  rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
  memset(reference->MutableDataY(), 8, size);
  memset(reference->MutableDataU(), 4, quartersize);
  memset(reference->MutableDataV(), 2, quartersize);

  rtc::scoped_refptr<I422Buffer> i422_buffer(I422Buffer::Create(width, height));
  // Convert the reference buffer to I422.
  memset(i422_buffer->MutableDataY(), 8, size);
  memset(i422_buffer->MutableDataU(), 4, halfsize);
  memset(i422_buffer->MutableDataV(), 2, halfsize);

  // Confirm YUV values are as expected.
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(8, GetY(i422_buffer, col, row));
    }
  }
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < halfwidth; col++) {
      EXPECT_EQ(4, GetU(i422_buffer, col, row));
      EXPECT_EQ(2, GetV(i422_buffer, col, row));
    }
  }

  rtc::scoped_refptr<I420BufferInterface> i420_buffer(i422_buffer->ToI420());
  EXPECT_EQ(height, i420_buffer->height());
  EXPECT_EQ(width, i420_buffer->width());
  EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,112 @@
|
|||
|
||||
/*
|
||||
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/i444_buffer.h"
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "test/frame_utils.h"
|
||||
#include "test/gmock.h"
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
// Reads the luma sample at (col, row) from `buf`.
int GetY(rtc::scoped_refptr<I444BufferInterface> buf, int col, int row) {
  const int offset = row * buf->StrideY() + col;
  return buf->DataY()[offset];
}
|
||||
|
||||
// Reads the U chroma sample at (col, row) from `buf`.
int GetU(rtc::scoped_refptr<I444BufferInterface> buf, int col, int row) {
  const int offset = row * buf->StrideU() + col;
  return buf->DataU()[offset];
}
|
||||
|
||||
// Reads the V chroma sample at (col, row) from `buf`.
int GetV(rtc::scoped_refptr<I444BufferInterface> buf, int col, int row) {
  const int offset = row * buf->StrideV() + col;
  return buf->DataV()[offset];
}
|
||||
|
||||
// Fills `buf` with constant test values: Y = 1, U = 2, V = 3. I444 has no
// chroma subsampling, so one full-resolution pass covers all three planes.
void FillI444Buffer(rtc::scoped_refptr<I444Buffer> buf) {
  const uint8_t kLuma = 1;
  const uint8_t kChromaU = 2;
  const uint8_t kChromaV = 3;
  uint8_t* y_plane = buf->MutableDataY();
  uint8_t* u_plane = buf->MutableDataU();
  uint8_t* v_plane = buf->MutableDataV();
  for (int y = 0; y < buf->height(); ++y) {
    for (int x = 0; x < buf->width(); ++x) {
      y_plane[y * buf->StrideY() + x] = kLuma;
      u_plane[y * buf->StrideU() + x] = kChromaU;
      v_plane[y * buf->StrideV() + x] = kChromaV;
    }
  }
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// A freshly created I444 buffer reports the requested geometry; for 4:4:4
// chroma planes match the luma plane in size and stride.
TEST(I444BufferTest, InitialData) {
  constexpr int kStride = 3;
  constexpr int kWidth = 3;
  constexpr int kHeight = 3;

  rtc::scoped_refptr<I444Buffer> buffer = I444Buffer::Create(kWidth, kHeight);
  EXPECT_EQ(kWidth, buffer->width());
  EXPECT_EQ(kHeight, buffer->height());
  EXPECT_EQ(kStride, buffer->StrideY());
  EXPECT_EQ(kStride, buffer->StrideU());
  EXPECT_EQ(kStride, buffer->StrideV());
  EXPECT_EQ(3, buffer->ChromaWidth());
  EXPECT_EQ(3, buffer->ChromaHeight());
}
|
||||
|
||||
// Every sample written by FillI444Buffer reads back unchanged.
TEST(I444BufferTest, ReadPixels) {
  constexpr int kWidth = 3;
  constexpr int kHeight = 3;

  rtc::scoped_refptr<I444Buffer> buffer = I444Buffer::Create(kWidth, kHeight);
  // Y = 1, U = 2, V = 3.
  FillI444Buffer(buffer);
  for (int y = 0; y < kHeight; ++y) {
    for (int x = 0; x < kWidth; ++x) {
      EXPECT_EQ(1, GetY(buffer, x, y));
      EXPECT_EQ(2, GetU(buffer, x, y));
      EXPECT_EQ(3, GetV(buffer, x, y));
    }
  }
}
|
||||
|
||||
// Converts a constant-filled I444 buffer to I420 and compares against a
// hand-built I420 reference. With uniform plane values the 4:4:4 -> 4:2:0
// chroma downsampling must not change any sample value.
TEST(I444BufferTest, ToI420) {
  constexpr int width = 3;
  constexpr int height = 3;
  constexpr int size_y = width * height;
  // I420 chroma planes are subsampled 2x in both dimensions.
  constexpr int size_u = (width + 1) / 2 * (height + 1) / 2;
  constexpr int size_v = (width + 1) / 2 * (height + 1) / 2;
  rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
  memset(reference->MutableDataY(), 8, size_y);
  memset(reference->MutableDataU(), 4, size_u);
  memset(reference->MutableDataV(), 2, size_v);

  rtc::scoped_refptr<I444Buffer> i444_buffer(I444Buffer::Create(width, height));
  // Convert the reference buffer to I444.
  // Note: size_y is intentional for the chroma memsets — I444 chroma planes
  // are full resolution (width * height), unlike the I420 reference above.
  memset(i444_buffer->MutableDataY(), 8, size_y);
  memset(i444_buffer->MutableDataU(), 4, size_y);
  memset(i444_buffer->MutableDataV(), 2, size_y);

  // Confirm YUV values are as expected.
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(8, GetY(i444_buffer, col, row));
      EXPECT_EQ(4, GetU(i444_buffer, col, row));
      EXPECT_EQ(2, GetV(i444_buffer, col, row));
    }
  }

  rtc::scoped_refptr<I420BufferInterface> i420_buffer(i444_buffer->ToI420());
  EXPECT_EQ(height, i420_buffer->height());
  EXPECT_EQ(width, i420_buffer->width());
  EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
|
||||
#define API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
|
||||
|
||||
#include "api/video/recordable_encoded_frame.h"
|
||||
#include "test/gmock.h"
|
||||
|
||||
namespace webrtc {
|
||||
// GoogleMock stand-in for RecordableEncodedFrame, letting tests stub out the
// encoded payload, color space, codec and timing accessors.
class MockRecordableEncodedFrame : public RecordableEncodedFrame {
 public:
  MOCK_METHOD(rtc::scoped_refptr<const EncodedImageBufferInterface>,
              encoded_buffer,
              (),
              (const, override));
  MOCK_METHOD(absl::optional<webrtc::ColorSpace>,
              color_space,
              (),
              (const, override));
  MOCK_METHOD(VideoCodecType, codec, (), (const, override));
  MOCK_METHOD(bool, is_key_frame, (), (const, override));
  MOCK_METHOD(EncodedResolution, resolution, (), (const, override));
  MOCK_METHOD(Timestamp, render_time, (), (const, override));
};
|
||||
} // namespace webrtc
|
||||
#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_
|
||||
|
|
@ -0,0 +1,119 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/nv12_buffer.h"
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "test/frame_utils.h"
|
||||
#include "test/gmock.h"
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
namespace {
|
||||
// Reads the luma sample at (col, row) from `buf`.
int GetY(rtc::scoped_refptr<NV12BufferInterface> buf, int col, int row) {
  const int offset = row * buf->StrideY() + col;
  return buf->DataY()[offset];
}
|
||||
|
||||
// Reads the U sample covering luma position (col, row). NV12 interleaves
// U/V in one half-resolution plane; U sits at even byte offsets.
int GetU(rtc::scoped_refptr<NV12BufferInterface> buf, int col, int row) {
  const int uv_row = row / 2;
  const int uv_col = (col / 2) * 2;
  return buf->DataUV()[uv_row * buf->StrideUV() + uv_col];
}
|
||||
|
||||
// Reads the V sample covering luma position (col, row). V sits one byte
// after its paired U in the interleaved UV plane.
int GetV(rtc::scoped_refptr<NV12BufferInterface> buf, int col, int row) {
  const int uv_row = row / 2;
  const int uv_col = (col / 2) * 2 + 1;
  return buf->DataUV()[uv_row * buf->StrideUV() + uv_col];
}
|
||||
|
||||
// Fills `buf` with constant test values: Y = 1, U = 2, V = 3. The UV plane
// interleaves U and V bytes (U at even offsets, V at odd offsets).
void FillNV12Buffer(rtc::scoped_refptr<NV12Buffer> buf) {
  const uint8_t kLuma = 1;
  const uint8_t kChromaU = 2;
  const uint8_t kChromaV = 3;
  uint8_t* y_plane = buf->MutableDataY();
  for (int y = 0; y < buf->height(); ++y) {
    for (int x = 0; x < buf->width(); ++x) {
      y_plane[y * buf->StrideY() + x] = kLuma;
    }
  }
  // Fill interleaving UV values across the full stride of each chroma row.
  uint8_t* uv_plane = buf->MutableDataUV();
  for (int y = 0; y < buf->ChromaHeight(); ++y) {
    for (int x = 0; x < buf->StrideUV(); x += 2) {
      const int uv_index = y * buf->StrideUV() + x;
      uv_plane[uv_index] = kChromaU;
      uv_plane[uv_index + 1] = kChromaV;
    }
  }
}
|
||||
|
||||
} // namespace
|
||||
|
||||
// A freshly created NV12 buffer reports the requested geometry. The UV
// stride is 2 * ChromaWidth because U and V bytes are interleaved.
TEST(NV12BufferTest, InitialData) {
  constexpr int kStrideY = 3;
  constexpr int kStrideUV = 4;
  constexpr int kWidth = 3;
  constexpr int kHeight = 3;

  rtc::scoped_refptr<NV12Buffer> buffer = NV12Buffer::Create(kWidth, kHeight);
  EXPECT_EQ(kWidth, buffer->width());
  EXPECT_EQ(kHeight, buffer->height());
  EXPECT_EQ(kStrideY, buffer->StrideY());
  EXPECT_EQ(kStrideUV, buffer->StrideUV());
  EXPECT_EQ(2, buffer->ChromaWidth());
  EXPECT_EQ(2, buffer->ChromaHeight());
}
|
||||
|
||||
// Values written by FillNV12Buffer read back unchanged through the
// de-interleaving GetY/GetU/GetV helpers.
TEST(NV12BufferTest, ReadPixels) {
  constexpr int kWidth = 3;
  constexpr int kHeight = 3;

  rtc::scoped_refptr<NV12Buffer> buffer = NV12Buffer::Create(kWidth, kHeight);
  // Y = 1, U = 2, V = 3.
  FillNV12Buffer(buffer);
  for (int y = 0; y < kHeight; ++y) {
    for (int x = 0; x < kWidth; ++x) {
      EXPECT_EQ(1, GetY(buffer, x, y));
      EXPECT_EQ(2, GetU(buffer, x, y));
      EXPECT_EQ(3, GetV(buffer, x, y));
    }
  }
}
|
||||
|
||||
// Converts a constant-filled NV12 buffer to I420 and compares against a
// hand-built I420 reference; de-interleaving must preserve the values.
TEST(NV12BufferTest, ToI420) {
  constexpr int width = 3;
  constexpr int height = 3;
  constexpr int size_y = width * height;
  // Half-resolution chroma plane sizes for the planar I420 reference.
  constexpr int size_u = (width + 1) / 2 * (height + 1) / 2;
  constexpr int size_v = (width + 1) / 2 * (height + 1) / 2;
  rtc::scoped_refptr<I420Buffer> reference(I420Buffer::Create(width, height));
  memset(reference->MutableDataY(), 8, size_y);
  memset(reference->MutableDataU(), 4, size_u);
  memset(reference->MutableDataV(), 2, size_v);

  rtc::scoped_refptr<NV12Buffer> nv12_buffer(NV12Buffer::Create(width, height));
  // Convert the reference buffer to NV12.
  memset(nv12_buffer->MutableDataY(), 8, size_y);
  // Interleaving u/v values.
  // NOTE(review): this writes size_u + size_v bytes, which assumes
  // StrideUV == 2 * ChromaWidth (true for the 3x3 buffer here: 4 * 2 == 8) —
  // confirm if the dimensions change.
  for (int i = 0; i < size_u + size_v; i += 2) {
    nv12_buffer->MutableDataUV()[i] = 4;
    nv12_buffer->MutableDataUV()[i + 1] = 2;
  }
  // Confirm YUV values are as expected.
  for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
      EXPECT_EQ(8, GetY(nv12_buffer, col, row));
      EXPECT_EQ(4, GetU(nv12_buffer, col, row));
      EXPECT_EQ(2, GetV(nv12_buffer, col, row));
    }
  }

  rtc::scoped_refptr<I420BufferInterface> i420_buffer(nv12_buffer->ToI420());
  EXPECT_EQ(height, i420_buffer->height());
  EXPECT_EQ(width, i420_buffer->width());
  EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer));
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_adaptation_counters.h"
|
||||
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// operator+ adds the resolution and fps counters element-wise.
TEST(AdaptationCountersTest, Addition) {
  const VideoAdaptationCounters lhs{0, 0};
  const VideoAdaptationCounters rhs{1, 2};
  const VideoAdaptationCounters total = lhs + rhs;
  EXPECT_EQ(1, total.resolution_adaptations);
  EXPECT_EQ(2, total.fps_adaptations);
}
|
||||
|
||||
// Counters compare equal only when both fields match; swapped fields differ.
TEST(AdaptationCountersTest, Equality) {
  const VideoAdaptationCounters first{1, 2};
  const VideoAdaptationCounters second{2, 1};
  EXPECT_EQ(first, first);
  EXPECT_NE(first, second);
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_bitrate_allocation.h"
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "test/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
// GetSimulcastAllocations() splits a two-stream allocation into per-stream
// allocations, each re-indexed to spatial layer 0.
TEST(VideoBitrateAllocation, SimulcastTargetBitrate) {
  VideoBitrateAllocation allocation;
  allocation.SetBitrate(0, 0, 10000);
  allocation.SetBitrate(0, 1, 20000);
  allocation.SetBitrate(1, 0, 40000);
  allocation.SetBitrate(1, 1, 80000);

  VideoBitrateAllocation expected_layer0;
  expected_layer0.SetBitrate(0, 0, 10000);
  expected_layer0.SetBitrate(0, 1, 20000);

  VideoBitrateAllocation expected_layer1;
  expected_layer1.SetBitrate(0, 0, 40000);
  expected_layer1.SetBitrate(0, 1, 80000);

  const std::vector<absl::optional<VideoBitrateAllocation>> layer_allocations =
      allocation.GetSimulcastAllocations();

  EXPECT_EQ(expected_layer0, layer_allocations[0]);
  EXPECT_EQ(expected_layer1, layer_allocations[1]);
}
|
||||
|
||||
// A spatial layer with no bitrate set yields an empty optional in the
// simulcast split, while the surrounding active layers are preserved.
TEST(VideoBitrateAllocation, SimulcastTargetBitrateWithInactiveStream) {
  // Create bitrate allocation with bitrate only for the first and third stream.
  VideoBitrateAllocation allocation;
  allocation.SetBitrate(0, 0, 10000);
  allocation.SetBitrate(0, 1, 20000);
  allocation.SetBitrate(2, 0, 40000);
  allocation.SetBitrate(2, 1, 80000);

  VideoBitrateAllocation expected_layer0;
  expected_layer0.SetBitrate(0, 0, 10000);
  expected_layer0.SetBitrate(0, 1, 20000);

  VideoBitrateAllocation expected_layer2;
  expected_layer2.SetBitrate(0, 0, 40000);
  expected_layer2.SetBitrate(0, 1, 80000);

  const std::vector<absl::optional<VideoBitrateAllocation>> layer_allocations =
      allocation.GetSimulcastAllocations();

  EXPECT_EQ(expected_layer0, layer_allocations[0]);
  EXPECT_FALSE(layer_allocations[1]);
  EXPECT_EQ(expected_layer2, layer_allocations[2]);
}
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
/*
|
||||
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_
|
||||
#define API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_
|
||||
|
||||
#include "api/rtp_packet_infos.h"
|
||||
#include "api/video/video_frame.h"
|
||||
#include "test/gmock.h"
|
||||
|
||||
namespace webrtc::test::video_frame_matchers {
|
||||
|
||||
// Matches a VideoFrame whose rotation() equals `rotation`.
MATCHER_P(Rotation, rotation, "") {
  return ::testing::Matches(::testing::Eq(rotation))(arg.rotation());
}

// Matches a VideoFrame whose ntp_time_ms() equals `ntp_ts` (a Timestamp,
// compared in milliseconds).
MATCHER_P(NtpTimestamp, ntp_ts, "") {
  return arg.ntp_time_ms() == ntp_ts.ms();
}

// Matches a VideoFrame whose packet_infos() satisfies the inner matcher `m`.
MATCHER_P(PacketInfos, m, "") {
  return ::testing::Matches(m)(arg.packet_infos());
}
|
||||
|
||||
} // namespace webrtc::test::video_frame_matchers
|
||||
|
||||
#endif // API_VIDEO_TEST_VIDEO_FRAME_MATCHERS_H_
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_adaptation_counters.h"
|
||||
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
bool VideoAdaptationCounters::operator==(
|
||||
const VideoAdaptationCounters& rhs) const {
|
||||
return fps_adaptations == rhs.fps_adaptations &&
|
||||
resolution_adaptations == rhs.resolution_adaptations;
|
||||
}
|
||||
|
||||
bool VideoAdaptationCounters::operator!=(
|
||||
const VideoAdaptationCounters& rhs) const {
|
||||
return !(rhs == *this);
|
||||
}
|
||||
|
||||
// Element-wise sum of two counter sets.
VideoAdaptationCounters VideoAdaptationCounters::operator+(
    const VideoAdaptationCounters& other) const {
  const int total_resolution =
      resolution_adaptations + other.resolution_adaptations;
  const int total_fps = fps_adaptations + other.fps_adaptations;
  return VideoAdaptationCounters(total_resolution, total_fps);
}
|
||||
|
||||
std::string VideoAdaptationCounters::ToString() const {
|
||||
rtc::StringBuilder ss;
|
||||
ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations
|
||||
<< " }";
|
||||
return ss.Release();
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
|
||||
#define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "rtc_base/checks.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Counts the number of adaptations that have resulted from resource overuse.
// Today we can adapt resolution and fps.
|
||||
struct VideoAdaptationCounters {
  VideoAdaptationCounters() : resolution_adaptations(0), fps_adaptations(0) {}
  // Both counts must be non-negative; enforced with DCHECKs.
  VideoAdaptationCounters(int resolution_adaptations, int fps_adaptations)
      : resolution_adaptations(resolution_adaptations),
        fps_adaptations(fps_adaptations) {
    RTC_DCHECK_GE(resolution_adaptations, 0);
    RTC_DCHECK_GE(fps_adaptations, 0);
  }

  // Sum of both adaptation counts.
  int Total() const { return fps_adaptations + resolution_adaptations; }

  bool operator==(const VideoAdaptationCounters& rhs) const;
  bool operator!=(const VideoAdaptationCounters& rhs) const;

  // Element-wise addition of two counter sets.
  VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const;

  // Human-readable form, e.g. "{ res=1 fps=2 }".
  std::string ToString() const;

  // Number of resolution adaptations.
  int resolution_adaptations;
  // Number of frame-rate adaptations.
  int fps_adaptations;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_ADAPTATION_REASON_H_
|
||||
#define API_VIDEO_VIDEO_ADAPTATION_REASON_H_
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
enum class VideoAdaptationReason { kQuality, kCpu };
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_ADAPTATION_REASON_H_
|
||||
|
|
@ -0,0 +1,185 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_bitrate_allocation.h"
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/numerics/safe_conversions.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Starts empty: no layer bitrates set, total sum of zero, not bandwidth
// limited.
VideoBitrateAllocation::VideoBitrateAllocation()
    : sum_(0), is_bw_limited_(false) {}
|
||||
|
||||
// Sets the bitrate for one (spatial, temporal) layer, replacing any previous
// value, and keeps the cached total `sum_` consistent. Returns false (leaving
// the allocation unchanged) if the new total would exceed kMaxBitrateBps.
bool VideoBitrateAllocation::SetBitrate(size_t spatial_index,
                                        size_t temporal_index,
                                        uint32_t bitrate_bps) {
  RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
  RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
  // Accumulate in 64 bits so the overflow check below cannot itself overflow.
  int64_t new_bitrate_sum_bps = sum_;
  absl::optional<uint32_t>& layer_bitrate =
      bitrates_[spatial_index][temporal_index];
  if (layer_bitrate) {
    // Replacing an existing value: subtract it before adding the new one.
    RTC_DCHECK_LE(*layer_bitrate, sum_);
    new_bitrate_sum_bps -= *layer_bitrate;
  }
  new_bitrate_sum_bps += bitrate_bps;
  if (new_bitrate_sum_bps > kMaxBitrateBps)
    return false;

  layer_bitrate = bitrate_bps;
  sum_ = rtc::dchecked_cast<uint32_t>(new_bitrate_sum_bps);
  return true;
}
|
||||
|
||||
// True if an explicit bitrate (possibly zero) has been set for the given
// (spatial, temporal) layer.
bool VideoBitrateAllocation::HasBitrate(size_t spatial_index,
                                        size_t temporal_index) const {
  RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
  RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
  const absl::optional<uint32_t>& layer_bitrate =
      bitrates_[spatial_index][temporal_index];
  return layer_bitrate.has_value();
}
|
||||
|
||||
// Returns the bitrate for the given layer, or 0 if none has been set.
uint32_t VideoBitrateAllocation::GetBitrate(size_t spatial_index,
                                            size_t temporal_index) const {
  RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
  RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
  const absl::optional<uint32_t>& layer_bitrate =
      bitrates_[spatial_index][temporal_index];
  return layer_bitrate.value_or(0);
}
|
||||
|
||||
// Whether the specific spatial layers has the bitrate set in any of its
|
||||
// temporal layers.
|
||||
// A spatial layer is "used" if any of its temporal layers has an explicitly
// set bitrate.
bool VideoBitrateAllocation::IsSpatialLayerUsed(size_t spatial_index) const {
  RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
  for (const absl::optional<uint32_t>& temporal_bitrate :
       bitrates_[spatial_index]) {
    if (temporal_bitrate.has_value())
      return true;
  }
  return false;
}
|
||||
|
||||
// Get the sum of all the temporal layer for a specific spatial layer.
|
||||
// Sum of all temporal-layer bitrates within one spatial layer.
uint32_t VideoBitrateAllocation::GetSpatialLayerSum(
    size_t spatial_index) const {
  RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
  // Summing up to the highest temporal index covers the whole spatial layer.
  const size_t top_temporal_index = kMaxTemporalStreams - 1;
  return GetTemporalLayerSum(spatial_index, top_temporal_index);
}
|
||||
|
||||
// Cumulative bitrate of temporal layers 0..temporal_index (inclusive) within
// one spatial layer; unset layers contribute 0.
uint32_t VideoBitrateAllocation::GetTemporalLayerSum(
    size_t spatial_index,
    size_t temporal_index) const {
  RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);
  RTC_CHECK_LT(temporal_index, kMaxTemporalStreams);
  uint32_t accumulated = 0;
  for (size_t ti = 0; ti <= temporal_index; ++ti) {
    accumulated += bitrates_[spatial_index][ti].value_or(0);
  }
  return accumulated;
}
|
||||
|
||||
// Returns per-temporal-layer bitrates for one spatial layer, sized to the
// highest temporal layer that has an explicit value (so trailing unset
// layers are omitted); interior unset layers are reported as 0.
std::vector<uint32_t> VideoBitrateAllocation::GetTemporalLayerAllocation(
    size_t spatial_index) const {
  RTC_CHECK_LT(spatial_index, kMaxSpatialLayers);

  // Shrink from the top until the highest explicitly-set layer is found.
  size_t used_layers = kMaxTemporalStreams;
  while (used_layers > 0 &&
         !bitrates_[spatial_index][used_layers - 1].has_value()) {
    --used_layers;
  }

  std::vector<uint32_t> temporal_rates(used_layers);
  for (size_t i = 0; i < used_layers; ++i) {
    temporal_rates[i] = bitrates_[spatial_index][i].value_or(0);
  }
  return temporal_rates;
}
|
||||
|
||||
std::vector<absl::optional<VideoBitrateAllocation>>
|
||||
VideoBitrateAllocation::GetSimulcastAllocations() const {
|
||||
std::vector<absl::optional<VideoBitrateAllocation>> bitrates;
|
||||
for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
|
||||
absl::optional<VideoBitrateAllocation> layer_bitrate;
|
||||
if (IsSpatialLayerUsed(si)) {
|
||||
layer_bitrate = VideoBitrateAllocation();
|
||||
for (int tl = 0; tl < kMaxTemporalStreams; ++tl) {
|
||||
if (HasBitrate(si, tl))
|
||||
layer_bitrate->SetBitrate(0, tl, GetBitrate(si, tl));
|
||||
}
|
||||
}
|
||||
bitrates.push_back(layer_bitrate);
|
||||
}
|
||||
return bitrates;
|
||||
}
|
||||
|
||||
bool VideoBitrateAllocation::operator==(
|
||||
const VideoBitrateAllocation& other) const {
|
||||
for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
|
||||
for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
|
||||
if (bitrates_[si][ti] != other.bitrates_[si][ti])
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// Renders the allocation as nested bracketed lists, one inner list of
// temporal bitrates per spatial layer. Both loops stop early once the running
// sum reaches the known total, so trailing empty layers are omitted.
std::string VideoBitrateAllocation::ToString() const {
  if (sum_ == 0)
    return "VideoBitrateAllocation [ [] ]";

  // Max string length in practice is 260, but let's have some overhead and
  // round up to nearest power of two.
  char string_buf[512];
  rtc::SimpleStringBuilder ssb(string_buf);

  ssb << "VideoBitrateAllocation [";
  uint32_t spatial_cumulator = 0;
  for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
    RTC_DCHECK_LE(spatial_cumulator, sum_);
    // All bitrate accounted for — remaining spatial layers must be empty.
    if (spatial_cumulator == sum_)
      break;

    const uint32_t layer_sum = GetSpatialLayerSum(si);
    // Single-spatial-layer case stays on one line; otherwise each layer is
    // printed on its own line.
    if (layer_sum == sum_ && si == 0) {
      ssb << " [";
    } else {
      if (si > 0)
        ssb << ",";
      ssb << '\n' << " [";
    }
    spatial_cumulator += layer_sum;

    uint32_t temporal_cumulator = 0;
    for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
      RTC_DCHECK_LE(temporal_cumulator, layer_sum);
      // This spatial layer fully printed — skip trailing empty temporal
      // layers.
      if (temporal_cumulator == layer_sum)
        break;

      if (ti > 0)
        ssb << ", ";

      uint32_t bitrate = bitrates_[si][ti].value_or(0);
      ssb << bitrate;
      temporal_cumulator += bitrate;
    }
    ssb << "]";
  }

  RTC_DCHECK_EQ(spatial_cumulator, sum_);
  ssb << " ]";
  return ssb.str();
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_
|
||||
#define API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdint.h>
|
||||
|
||||
#include <limits>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/video/video_codec_constants.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Class that describes how video bitrate, in bps, is allocated across temporal
// and spatial layers. Note that bitrates are NOT cumulative. Depending on
// whether layers are dependent or not, it is up to the user to aggregate.
// For each index, the bitrate can also be either set or unset. An explicitly
// set bps = 0 is used to signal a "turn off" for that layer.
|
||||
class RTC_EXPORT VideoBitrateAllocation {
 public:
  // Largest representable bitrate; also the cap on the total sum.
  static constexpr uint32_t kMaxBitrateBps =
      std::numeric_limits<uint32_t>::max();
  VideoBitrateAllocation();

  // Sets the bitrate of the (spatial_index, temporal_index) cell.
  // NOTE(review): the definition is not visible in this header; presumably
  // the bool signals rejection (e.g. out-of-range index or exceeding
  // kMaxBitrateBps) — confirm in video_bitrate_allocation.cc.
  bool SetBitrate(size_t spatial_index,
                  size_t temporal_index,
                  uint32_t bitrate_bps);

  // True if the (spatial_index, temporal_index) cell has an explicit value.
  // A value explicitly set to 0 still counts as "set" (see class comment).
  bool HasBitrate(size_t spatial_index, size_t temporal_index) const;

  // Returns the bitrate of the cell.
  // NOTE(review): behavior for an unset cell is not visible here — ToString()
  // reads the raw storage with value_or(0); confirm against the definition.
  uint32_t GetBitrate(size_t spatial_index, size_t temporal_index) const;

  // Whether the specified spatial layer has a bitrate set in any of its
  // temporal layers.
  bool IsSpatialLayerUsed(size_t spatial_index) const;

  // Get the sum of all the temporal layers for a specific spatial layer.
  uint32_t GetSpatialLayerSum(size_t spatial_index) const;

  // Sum of bitrates of temporal layers, from layer 0 to `temporal_index`
  // inclusive, of specified spatial layer `spatial_index`. Bitrates of lower
  // spatial layers are not included.
  uint32_t GetTemporalLayerSum(size_t spatial_index,
                               size_t temporal_index) const;

  // Returns a vector of the temporal layer bitrates for the specific spatial
  // layer. Length of the returned vector is cropped to the highest temporal
  // layer with a defined bitrate.
  std::vector<uint32_t> GetTemporalLayerAllocation(size_t spatial_index) const;

  // Returns one VideoBitrateAllocation for each spatial layer. This is used to
  // configure simulcast streams. Note that the length of the returned vector is
  // always kMaxSpatialLayers, the optional is unset for unused layers.
  std::vector<absl::optional<VideoBitrateAllocation>> GetSimulcastAllocations()
      const;

  uint32_t get_sum_bps() const { return sum_; }  // Sum of all bitrates.
  uint32_t get_sum_kbps() const {
    // Round down to not exceed the allocated bitrate.
    return sum_ / 1000;
  }

  bool operator==(const VideoBitrateAllocation& other) const;
  inline bool operator!=(const VideoBitrateAllocation& other) const {
    return !(*this == other);
  }

  // Human-readable dump of the per-layer allocation; see the definition for
  // the exact format.
  std::string ToString() const;

  // Indicates if the allocation has some layers/streams disabled due to
  // low available bandwidth.
  void set_bw_limited(bool limited) { is_bw_limited_ = limited; }
  bool is_bw_limited() const { return is_bw_limited_; }

 private:
  // Cached total of all set cells.
  // NOTE(review): how sum_ is kept in sync is not visible in this header —
  // presumably by SetBitrate(); confirm in video_bitrate_allocation.cc.
  uint32_t sum_;
  // A cell is absl::nullopt when no bitrate has been set for that layer pair.
  absl::optional<uint32_t> bitrates_[kMaxSpatialLayers][kMaxTemporalStreams];
  bool is_bw_limited_;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_bitrate_allocator.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Legacy constructor: rates given in raw bps/fps. The stable bitrate
// defaults to the total bitrate.
VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
    uint32_t total_bitrate_bps,
    uint32_t framerate)
    : total_bitrate(DataRate::BitsPerSec(total_bitrate_bps)),
      stable_bitrate(DataRate::BitsPerSec(total_bitrate_bps)),
      framerate(static_cast<double>(framerate)) {}

// Typed constructor without a stable bitrate; it defaults to the total.
VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
    DataRate total_bitrate,
    double framerate)
    : total_bitrate(total_bitrate),
      stable_bitrate(total_bitrate),
      framerate(framerate) {}

// Fully specified constructor.
VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
    DataRate total_bitrate,
    DataRate stable_bitrate,
    double framerate)
    : total_bitrate(total_bitrate),
      stable_bitrate(stable_bitrate),
      framerate(framerate) {}

VideoBitrateAllocationParameters::~VideoBitrateAllocationParameters() = default;
|
||||
|
||||
// Legacy entry point. Forwards to Allocate() with the stable bitrate set
// equal to the total bitrate. Note that the base-class Allocate() calls back
// into GetAllocation(), so subclasses must override at least one of the two.
VideoBitrateAllocation VideoBitrateAllocator::GetAllocation(
    uint32_t total_bitrate_bps,
    uint32_t framerate) {
  const DataRate rate = DataRate::BitsPerSec(total_bitrate_bps);
  VideoBitrateAllocationParameters parameters(rate, rate,
                                              static_cast<double>(framerate));
  return Allocate(parameters);
}
|
||||
|
||||
// Default implementation: bridge to the legacy GetAllocation() entry point.
// The double framerate is implicitly truncated to uint32_t here. Since the
// base-class GetAllocation() calls Allocate(), a subclass must override at
// least one of the two to avoid infinite recursion.
VideoBitrateAllocation VideoBitrateAllocator::Allocate(
    VideoBitrateAllocationParameters parameters) {
  return GetAllocation(parameters.total_bitrate.bps(), parameters.framerate);
}
|
||||
|
||||
// Deprecated no-op default; see the comment on the declaration in
// video_bitrate_allocator.h.
void VideoBitrateAllocator::SetLegacyConferenceMode(bool enabled) {}
|
||||
|
||||
} // namespace webrtc
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_
|
||||
#define API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_
|
||||
|
||||
#include "api/units/data_rate.h"
|
||||
#include "api/video/video_bitrate_allocation.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Input to VideoBitrateAllocator::Allocate().
struct VideoBitrateAllocationParameters {
  // Legacy bps/fps constructor; `stable_bitrate` is set equal to
  // `total_bitrate` (see video_bitrate_allocator.cc).
  VideoBitrateAllocationParameters(uint32_t total_bitrate_bps,
                                   uint32_t framerate);
  // `stable_bitrate` defaults to `total_bitrate`.
  VideoBitrateAllocationParameters(DataRate total_bitrate, double framerate);
  VideoBitrateAllocationParameters(DataRate total_bitrate,
                                   DataRate stable_bitrate,
                                   double framerate);
  ~VideoBitrateAllocationParameters();

  // Target rate to distribute across layers.
  DataRate total_bitrate;
  // NOTE(review): presumably a smoothed/"stable" estimate of the available
  // rate; its exact semantics are not visible in this file — confirm at the
  // call sites of Allocate().
  DataRate stable_bitrate;
  double framerate;
};
|
||||
|
||||
class VideoBitrateAllocator {
|
||||
public:
|
||||
VideoBitrateAllocator() {}
|
||||
virtual ~VideoBitrateAllocator() {}
|
||||
|
||||
virtual VideoBitrateAllocation GetAllocation(uint32_t total_bitrate_bps,
|
||||
uint32_t framerate);
|
||||
|
||||
virtual VideoBitrateAllocation Allocate(
|
||||
VideoBitrateAllocationParameters parameters);
|
||||
|
||||
// Deprecated: Only used to work around issues with the legacy conference
|
||||
// screenshare mode and shouldn't be needed by any subclasses.
|
||||
virtual void SetLegacyConferenceMode(bool enabled);
|
||||
};
|
||||
|
||||
// Callback interface for receiving newly computed bitrate allocations.
// NOTE(review): call sites are outside this file.
class VideoBitrateAllocationObserver {
 public:
  VideoBitrateAllocationObserver() {}
  virtual ~VideoBitrateAllocationObserver() {}

  // Invoked with the latest allocation.
  virtual void OnBitrateAllocationUpdated(
      const VideoBitrateAllocation& allocation) = 0;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
|
||||
#define API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "api/video/video_bitrate_allocator.h"
|
||||
#include "api/video_codecs/video_codec.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// A factory that creates VideoBitrateAllocator.
|
||||
// NOTE: This class is still under development and may change without notice.
|
||||
class VideoBitrateAllocatorFactory {
 public:
  virtual ~VideoBitrateAllocatorFactory() = default;

  // Creates a VideoBitrateAllocator for a specific video codec. Ownership of
  // the returned allocator passes to the caller.
  virtual std::unique_ptr<VideoBitrateAllocator> CreateVideoBitrateAllocator(
      const VideoCodec& codec) = 0;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
/*
|
||||
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_CODEC_CONSTANTS_H_
|
||||
#define API_VIDEO_VIDEO_CODEC_CONSTANTS_H_
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Compile-time upper bounds used to size fixed arrays in the video pipeline
// (e.g. VideoBitrateAllocation stores a
// kMaxSpatialLayers x kMaxTemporalStreams grid).
enum : int { kMaxEncoderBuffers = 8 };
enum : int { kMaxSimulcastStreams = 3 };
enum : int { kMaxSpatialLayers = 5 };
enum : int { kMaxTemporalStreams = 4 };
enum : int { kMaxPreferredPixelFormats = 5 };
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_CODEC_CONSTANTS_H_
|
||||
30
TMessagesProj/jni/voip/webrtc/api/video/video_codec_type.h
Normal file
30
TMessagesProj/jni/voip/webrtc/api/video/video_codec_type.h
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_CODEC_TYPE_H_
|
||||
#define API_VIDEO_VIDEO_CODEC_TYPE_H_
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Supported video codec families. Enumerator values are implicitly
// sequential; append new entries at the end to avoid renumbering.
enum VideoCodecType {
  // There are various memset(..., 0, ...) calls in the code that rely on
  // kVideoCodecGeneric being zero.
  kVideoCodecGeneric = 0,
  kVideoCodecVP8,
  kVideoCodecVP9,
  kVideoCodecAV1,
  kVideoCodecH264,
  kVideoCodecH265,
  kVideoCodecMultiplex,
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_CODEC_TYPE_H_
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_content_type.h"
|
||||
|
||||
#include "rtc_base/checks.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace videocontenttypehelpers {
|
||||
|
||||
namespace {
|
||||
static constexpr uint8_t kScreenshareBitsSize = 1;
|
||||
static constexpr uint8_t kScreenshareBitsMask =
|
||||
(1u << kScreenshareBitsSize) - 1;
|
||||
} // namespace
|
||||
|
||||
bool IsScreenshare(const VideoContentType& content_type) {
|
||||
// Ensure no bits apart from the screenshare bit is set.
|
||||
// This CHECK is a temporary measure to detect code that introduces
|
||||
// values according to old versions.
|
||||
RTC_CHECK((static_cast<uint8_t>(content_type) & !kScreenshareBitsMask) == 0);
|
||||
return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
|
||||
}
|
||||
|
||||
bool IsValidContentType(uint8_t value) {
|
||||
// Only the screenshare bit is allowed.
|
||||
// However, due to previous usage of the next 5 bits, we allow
|
||||
// the lower 6 bits to be set.
|
||||
return value < (1 << 6);
|
||||
}
|
||||
|
||||
const char* ToString(const VideoContentType& content_type) {
|
||||
return IsScreenshare(content_type) ? "screen" : "realtime";
|
||||
}
|
||||
} // namespace videocontenttypehelpers
|
||||
} // namespace webrtc
|
||||
36
TMessagesProj/jni/voip/webrtc/api/video/video_content_type.h
Normal file
36
TMessagesProj/jni/voip/webrtc/api/video/video_content_type.h
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
/*
|
||||
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_CONTENT_TYPE_H_
|
||||
#define API_VIDEO_VIDEO_CONTENT_TYPE_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// VideoContentType stored as a single byte, which is sent over the network
|
||||
// in the rtp-hdrext/video-content-type extension.
|
||||
// Only the lowest bit is used, per the enum.
|
||||
enum class VideoContentType : uint8_t {
  UNSPECIFIED = 0,
  SCREENSHARE = 1,
};

// Helpers for inspecting the raw content-type byte.
namespace videocontenttypehelpers {
// True if the screenshare bit (bit 0) is set.
bool IsScreenshare(const VideoContentType& content_type);

// True if `value` is an acceptable wire value. Only the lower 6 bits may be
// set; see video_content_type.cc for the backwards-compatibility rationale.
bool IsValidContentType(uint8_t value);

// Returns "screen" or "realtime".
const char* ToString(const VideoContentType& content_type);
}  // namespace videocontenttypehelpers
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_CONTENT_TYPE_H_
|
||||
334
TMessagesProj/jni/voip/webrtc/api/video/video_frame.cc
Normal file
334
TMessagesProj/jni/voip/webrtc/api/video/video_frame.cc
Normal file
|
|
@ -0,0 +1,334 @@
|
|||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_frame.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <utility>
|
||||
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/time_utils.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
void VideoFrame::UpdateRect::Union(const UpdateRect& other) {
|
||||
if (other.IsEmpty())
|
||||
return;
|
||||
if (IsEmpty()) {
|
||||
*this = other;
|
||||
return;
|
||||
}
|
||||
int right = std::max(offset_x + width, other.offset_x + other.width);
|
||||
int bottom = std::max(offset_y + height, other.offset_y + other.height);
|
||||
offset_x = std::min(offset_x, other.offset_x);
|
||||
offset_y = std::min(offset_y, other.offset_y);
|
||||
width = right - offset_x;
|
||||
height = bottom - offset_y;
|
||||
RTC_DCHECK_GT(width, 0);
|
||||
RTC_DCHECK_GT(height, 0);
|
||||
}
|
||||
|
||||
void VideoFrame::UpdateRect::Intersect(const UpdateRect& other) {
|
||||
if (other.IsEmpty() || IsEmpty()) {
|
||||
MakeEmptyUpdate();
|
||||
return;
|
||||
}
|
||||
|
||||
int right = std::min(offset_x + width, other.offset_x + other.width);
|
||||
int bottom = std::min(offset_y + height, other.offset_y + other.height);
|
||||
offset_x = std::max(offset_x, other.offset_x);
|
||||
offset_y = std::max(offset_y, other.offset_y);
|
||||
width = right - offset_x;
|
||||
height = bottom - offset_y;
|
||||
if (width <= 0 || height <= 0) {
|
||||
MakeEmptyUpdate();
|
||||
}
|
||||
}
|
||||
|
||||
// Resets all fields, turning this into a zero-sized update at the origin.
void VideoFrame::UpdateRect::MakeEmptyUpdate() {
  offset_x = 0;
  offset_y = 0;
  width = 0;
  height = 0;
}
|
||||
|
||||
// A rect is empty iff width and height are both zero; the offsets are not
// considered here.
bool VideoFrame::UpdateRect::IsEmpty() const {
  return width == 0 && height == 0;
}
|
||||
|
||||
// Maps this update rect, defined on a (frame_width x frame_height) frame,
// through a crop of (crop_width x crop_height) at (crop_x, crop_y) followed
// by a scale to (scaled_width x scaled_height). The result is snapped to a
// 2x2 grid (to cover possible chroma subsampling) and padded to absorb
// scaler artifacts. Returns the empty rect when the update falls entirely
// outside the cropped area.
VideoFrame::UpdateRect VideoFrame::UpdateRect::ScaleWithFrame(
    int frame_width,
    int frame_height,
    int crop_x,
    int crop_y,
    int crop_width,
    int crop_height,
    int scaled_width,
    int scaled_height) const {
  RTC_DCHECK_GT(frame_width, 0);
  RTC_DCHECK_GT(frame_height, 0);

  RTC_DCHECK_GT(crop_width, 0);
  RTC_DCHECK_GT(crop_height, 0);

  RTC_DCHECK_LE(crop_width + crop_x, frame_width);
  RTC_DCHECK_LE(crop_height + crop_y, frame_height);

  RTC_DCHECK_GT(scaled_width, 0);
  RTC_DCHECK_GT(scaled_height, 0);

  // Check if update rect is out of the cropped area.
  // Bug fix: the vertical test previously compared against
  // `crop_y + crop_width`; the correct bound is `crop_y + crop_height`.
  if (offset_x + width < crop_x || offset_x > crop_x + crop_width ||
      offset_y + height < crop_y || offset_y > crop_y + crop_height) {
    return {0, 0, 0, 0};
  }

  // Translate into crop-relative coordinates, clipping at the crop origin.
  int x = offset_x - crop_x;
  int w = width;
  if (x < 0) {
    w += x;
    x = 0;
  }
  int y = offset_y - crop_y;
  int h = height;
  if (y < 0) {
    h += y;
    y = 0;
  }

  // Lower corner is rounded down.
  x = x * scaled_width / crop_width;
  y = y * scaled_height / crop_height;
  // Upper corner is rounded up.
  w = (w * scaled_width + crop_width - 1) / crop_width;
  h = (h * scaled_height + crop_height - 1) / crop_height;

  // Round to full 2x2 blocks due to possible subsampling in the pixel data.
  if (x % 2) {
    --x;
    ++w;
  }
  if (y % 2) {
    --y;
    ++h;
  }
  if (w % 2) {
    ++w;
  }
  if (h % 2) {
    ++h;
  }

  // Expand the update rect by 2 pixels in each direction to include any
  // possible scaling artifacts.
  if (scaled_width != crop_width || scaled_height != crop_height) {
    if (x > 0) {
      x -= 2;
      w += 2;
    }
    if (y > 0) {
      y -= 2;
      h += 2;
    }
    w += 2;
    h += 2;
  }

  // Ensure update rect is inside frame dimensions.
  if (x + w > scaled_width) {
    w = scaled_width - x;
  }
  if (y + h > scaled_height) {
    h = scaled_height - y;
  }
  RTC_DCHECK_GE(w, 0);
  RTC_DCHECK_GE(h, 0);
  // Normalize any degenerate result to the canonical empty rect.
  if (w == 0 || h == 0) {
    w = 0;
    h = 0;
    x = 0;
    y = 0;
  }

  return {x, y, w, h};
}
|
||||
|
||||
VideoFrame::Builder::Builder() = default;

VideoFrame::Builder::~Builder() = default;

// Materializes the frame. A video frame buffer must have been supplied via
// set_video_frame_buffer() (enforced by the CHECK); all other fields fall
// back to the Builder's defaults.
VideoFrame VideoFrame::Builder::build() {
  RTC_CHECK(video_frame_buffer_ != nullptr);
  return VideoFrame(id_, video_frame_buffer_, timestamp_us_,
                    capture_time_identifier_, reference_time_, timestamp_rtp_,
                    ntp_time_ms_, rotation_, color_space_, render_parameters_,
                    update_rect_, packet_infos_);
}
|
||||
|
||||
// Builder setters: each records the value and returns *this for chaining.

VideoFrame::Builder& VideoFrame::Builder::set_video_frame_buffer(
    const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
  video_frame_buffer_ = buffer;
  return *this;
}

// Stores the timestamp converted from milliseconds to microseconds.
VideoFrame::Builder& VideoFrame::Builder::set_timestamp_ms(
    int64_t timestamp_ms) {
  timestamp_us_ = timestamp_ms * rtc::kNumMicrosecsPerMillisec;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_timestamp_us(
    int64_t timestamp_us) {
  timestamp_us_ = timestamp_us;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_capture_time_identifier(
    const absl::optional<Timestamp>& capture_time_identifier) {
  capture_time_identifier_ = capture_time_identifier;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_reference_time(
    const absl::optional<Timestamp>& reference_time) {
  reference_time_ = reference_time;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_timestamp_rtp(
    uint32_t timestamp_rtp) {
  timestamp_rtp_ = timestamp_rtp;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_ntp_time_ms(int64_t ntp_time_ms) {
  ntp_time_ms_ = ntp_time_ms;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_rotation(VideoRotation rotation) {
  rotation_ = rotation;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_color_space(
    const absl::optional<ColorSpace>& color_space) {
  color_space_ = color_space;
  return *this;
}

// Pointer overload: a null pointer clears the color space.
VideoFrame::Builder& VideoFrame::Builder::set_color_space(
    const ColorSpace* color_space) {
  color_space_ =
      color_space ? absl::make_optional(*color_space) : absl::nullopt;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_id(uint16_t id) {
  id_ = id;
  return *this;
}

VideoFrame::Builder& VideoFrame::Builder::set_update_rect(
    const absl::optional<VideoFrame::UpdateRect>& update_rect) {
  update_rect_ = update_rect;
  return *this;
}

// Takes ownership of `packet_infos` by move.
VideoFrame::Builder& VideoFrame::Builder::set_packet_infos(
    RtpPacketInfos packet_infos) {
  packet_infos_ = std::move(packet_infos);
  return *this;
}
|
||||
|
||||
// Legacy constructor. Does not check `buffer` for null; width()/height()
// tolerate a null buffer by returning 0.
VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                       webrtc::VideoRotation rotation,
                       int64_t timestamp_us)
    : video_frame_buffer_(buffer),
      timestamp_rtp_(0),
      ntp_time_ms_(0),
      timestamp_us_(timestamp_us),
      rotation_(rotation) {}

// Legacy constructor taking the render time in milliseconds (converted to
// microseconds for storage). `buffer` must be non-null (DCHECKed).
VideoFrame::VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                       uint32_t timestamp_rtp,
                       int64_t render_time_ms,
                       VideoRotation rotation)
    : video_frame_buffer_(buffer),
      timestamp_rtp_(timestamp_rtp),
      ntp_time_ms_(0),
      timestamp_us_(render_time_ms * rtc::kNumMicrosecsPerMillisec),
      rotation_(rotation) {
  RTC_DCHECK(buffer);
}

// Full constructor used by Builder::build(). If an update rect is supplied,
// it must lie entirely within the buffer dimensions (DCHECKed).
VideoFrame::VideoFrame(uint16_t id,
                       const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
                       int64_t timestamp_us,
                       const absl::optional<Timestamp>& capture_time_identifier,
                       const absl::optional<Timestamp>& reference_time,
                       uint32_t timestamp_rtp,
                       int64_t ntp_time_ms,
                       VideoRotation rotation,
                       const absl::optional<ColorSpace>& color_space,
                       const RenderParameters& render_parameters,
                       const absl::optional<UpdateRect>& update_rect,
                       RtpPacketInfos packet_infos)
    : id_(id),
      video_frame_buffer_(buffer),
      timestamp_rtp_(timestamp_rtp),
      ntp_time_ms_(ntp_time_ms),
      timestamp_us_(timestamp_us),
      capture_time_identifier_(capture_time_identifier),
      reference_time_(reference_time),
      rotation_(rotation),
      color_space_(color_space),
      render_parameters_(render_parameters),
      update_rect_(update_rect),
      packet_infos_(std::move(packet_infos)) {
  if (update_rect_) {
    RTC_DCHECK_GE(update_rect_->offset_x, 0);
    RTC_DCHECK_GE(update_rect_->offset_y, 0);
    RTC_DCHECK_LE(update_rect_->offset_x + update_rect_->width, width());
    RTC_DCHECK_LE(update_rect_->offset_y + update_rect_->height, height());
  }
}
|
||||
|
||||
VideoFrame::~VideoFrame() = default;

// Copies are cheap: the pixel data lives in the ref-counted
// VideoFrameBuffer, so copied frames share the same underlying buffer.
VideoFrame::VideoFrame(const VideoFrame&) = default;
VideoFrame::VideoFrame(VideoFrame&&) = default;
VideoFrame& VideoFrame::operator=(const VideoFrame&) = default;
VideoFrame& VideoFrame::operator=(VideoFrame&&) = default;
|
||||
|
||||
// Width of the underlying buffer, or 0 if no buffer is attached.
int VideoFrame::width() const {
  return video_frame_buffer_ ? video_frame_buffer_->width() : 0;
}

// Height of the underlying buffer, or 0 if no buffer is attached.
int VideoFrame::height() const {
  return video_frame_buffer_ ? video_frame_buffer_->height() : 0;
}

// Number of pixels (width * height). Note the product is computed in signed
// int before conversion to uint32_t.
uint32_t VideoFrame::size() const {
  return width() * height();
}

rtc::scoped_refptr<VideoFrameBuffer> VideoFrame::video_frame_buffer() const {
  return video_frame_buffer_;
}

// Replaces the pixel buffer. Null is disallowed (enforced by RTC_CHECK).
void VideoFrame::set_video_frame_buffer(
    const rtc::scoped_refptr<VideoFrameBuffer>& buffer) {
  RTC_CHECK(buffer);
  video_frame_buffer_ = buffer;
}

// Render time derived from timestamp_us(), truncated to milliseconds.
int64_t VideoFrame::render_time_ms() const {
  return timestamp_us() / rtc::kNumMicrosecsPerMillisec;
}
|
||||
|
||||
} // namespace webrtc
|
||||
331
TMessagesProj/jni/voip/webrtc/api/video/video_frame.h
Normal file
331
TMessagesProj/jni/voip/webrtc/api/video/video_frame.h
Normal file
|
|
@ -0,0 +1,331 @@
|
|||
/*
|
||||
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_FRAME_H_
|
||||
#define API_VIDEO_VIDEO_FRAME_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <utility>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/rtp_packet_infos.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "api/video/color_space.h"
|
||||
#include "api/video/hdr_metadata.h"
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "rtc_base/checks.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class RTC_EXPORT VideoFrame {
|
||||
public:
|
||||
// Value used to signal that `VideoFrame::id()` is not set.
|
||||
static constexpr uint16_t kNotSetId = 0;
|
||||
|
||||
struct RTC_EXPORT UpdateRect {
|
||||
int offset_x = 0;
|
||||
int offset_y = 0;
|
||||
int width = 0;
|
||||
int height = 0;
|
||||
|
||||
// Makes this UpdateRect a bounding box of this and other rect.
|
||||
void Union(const UpdateRect& other);
|
||||
|
||||
// Makes this UpdateRect an intersection of this and other rect.
|
||||
void Intersect(const UpdateRect& other);
|
||||
|
||||
// Sets everything to 0, making this UpdateRect a zero-size (empty) update.
|
||||
void MakeEmptyUpdate();
|
||||
|
||||
bool IsEmpty() const;
|
||||
|
||||
// Per-member equality check. Empty rectangles with different offsets would
|
||||
// be considered different.
|
||||
bool operator==(const UpdateRect& other) const {
|
||||
return other.offset_x == offset_x && other.offset_y == offset_y &&
|
||||
other.width == width && other.height == height;
|
||||
}
|
||||
|
||||
bool operator!=(const UpdateRect& other) const { return !(*this == other); }
|
||||
|
||||
// Scales update_rect given original frame dimensions.
|
||||
// Cropping is applied first, then rect is scaled down.
|
||||
// Update rect is snapped to 2x2 grid due to possible UV subsampling and
|
||||
// then expanded by additional 2 pixels in each direction to accommodate any
|
||||
// possible scaling artifacts.
|
||||
// Note, close but not equal update_rects on original frame may result in
|
||||
// the same scaled update rects.
|
||||
UpdateRect ScaleWithFrame(int frame_width,
|
||||
int frame_height,
|
||||
int crop_x,
|
||||
int crop_y,
|
||||
int crop_width,
|
||||
int crop_height,
|
||||
int scaled_width,
|
||||
int scaled_height) const;
|
||||
};
|
||||
|
||||
struct RTC_EXPORT ProcessingTime {
|
||||
TimeDelta Elapsed() const { return finish - start; }
|
||||
Timestamp start;
|
||||
Timestamp finish;
|
||||
};
|
||||
|
||||
struct RTC_EXPORT RenderParameters {
|
||||
bool use_low_latency_rendering = false;
|
||||
absl::optional<int32_t> max_composition_delay_in_frames;
|
||||
|
||||
bool operator==(const RenderParameters& other) const {
|
||||
return other.use_low_latency_rendering == use_low_latency_rendering &&
|
||||
other.max_composition_delay_in_frames ==
|
||||
max_composition_delay_in_frames;
|
||||
}
|
||||
|
||||
bool operator!=(const RenderParameters& other) const {
|
||||
return !(*this == other);
|
||||
}
|
||||
};
|
||||
|
||||
// Preferred way of building VideoFrame objects.
|
||||
class RTC_EXPORT Builder {
|
||||
public:
|
||||
Builder();
|
||||
~Builder();
|
||||
|
||||
VideoFrame build();
|
||||
Builder& set_video_frame_buffer(
|
||||
const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
|
||||
Builder& set_timestamp_ms(int64_t timestamp_ms);
|
||||
Builder& set_timestamp_us(int64_t timestamp_us);
|
||||
Builder& set_capture_time_identifier(
|
||||
const absl::optional<Timestamp>& capture_time_identifier);
|
||||
Builder& set_reference_time(
|
||||
const absl::optional<Timestamp>& reference_time);
|
||||
Builder& set_timestamp_rtp(uint32_t timestamp_rtp);
|
||||
Builder& set_ntp_time_ms(int64_t ntp_time_ms);
|
||||
Builder& set_rotation(VideoRotation rotation);
|
||||
Builder& set_color_space(const absl::optional<ColorSpace>& color_space);
|
||||
Builder& set_color_space(const ColorSpace* color_space);
|
||||
Builder& set_id(uint16_t id);
|
||||
Builder& set_update_rect(const absl::optional<UpdateRect>& update_rect);
|
||||
Builder& set_packet_infos(RtpPacketInfos packet_infos);
|
||||
|
||||
private:
|
||||
uint16_t id_ = kNotSetId;
|
||||
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
|
||||
int64_t timestamp_us_ = 0;
|
||||
absl::optional<Timestamp> capture_time_identifier_;
|
||||
absl::optional<Timestamp> reference_time_;
|
||||
uint32_t timestamp_rtp_ = 0;
|
||||
int64_t ntp_time_ms_ = 0;
|
||||
VideoRotation rotation_ = kVideoRotation_0;
|
||||
absl::optional<ColorSpace> color_space_;
|
||||
RenderParameters render_parameters_;
|
||||
absl::optional<UpdateRect> update_rect_;
|
||||
RtpPacketInfos packet_infos_;
|
||||
};
|
||||
|
||||
// To be deprecated. Migrate all use to Builder.
|
||||
VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
|
||||
webrtc::VideoRotation rotation,
|
||||
int64_t timestamp_us);
|
||||
VideoFrame(const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
|
||||
uint32_t timestamp_rtp,
|
||||
int64_t render_time_ms,
|
||||
VideoRotation rotation);
|
||||
|
||||
~VideoFrame();
|
||||
|
||||
// Support move and copy.
|
||||
VideoFrame(const VideoFrame&);
|
||||
VideoFrame(VideoFrame&&);
|
||||
VideoFrame& operator=(const VideoFrame&);
|
||||
VideoFrame& operator=(VideoFrame&&);
|
||||
|
||||
// Get frame width.
|
||||
int width() const;
|
||||
// Get frame height.
|
||||
int height() const;
|
||||
// Get frame size in pixels.
|
||||
uint32_t size() const;
|
||||
|
||||
// Get frame ID. Returns `kNotSetId` if ID is not set. Not guaranteed to be
// transferred from the sender to the receiver, but preserved on the sender
// side. The id should be propagated between all frame modifications during
// its lifetime from capturing to sending as encoded image. It is intended to
// be unique over a time window of a few minutes for the peer connection to
// which the corresponding video stream belongs to.
uint16_t id() const { return id_; }
void set_id(uint16_t id) { id_ = id; }

// System monotonic clock, same timebase as rtc::TimeMicros().
int64_t timestamp_us() const { return timestamp_us_; }
void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; }

// Optional identifier of the capture instant; stored as-is, no validation.
const absl::optional<Timestamp>& capture_time_identifier() const {
  return capture_time_identifier_;
}
void set_capture_time_identifier(
    const absl::optional<Timestamp>& capture_time_identifier) {
  capture_time_identifier_ = capture_time_identifier;
}

// Optional monotonically increasing capture-time reference (see the
// reference_time_ member comment for semantics).
const absl::optional<Timestamp>& reference_time() const {
  return reference_time_;
}
void set_reference_time(const absl::optional<Timestamp>& reference_time) {
  reference_time_ = reference_time;
}

// Set frame timestamp (90kHz).
void set_timestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }

// Get frame timestamp (90kHz).
uint32_t timestamp() const { return timestamp_rtp_; }

// Set capture ntp time in milliseconds.
void set_ntp_time_ms(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; }

// Get capture ntp time in milliseconds.
int64_t ntp_time_ms() const { return ntp_time_ms_; }
|
||||
|
||||
// Naming convention for Coordination of Video Orientation. Please see
// http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf
//
// "pending rotation" or "pending" = a frame that has a VideoRotation > 0.
//
// "not pending" = a frame that has a VideoRotation == 0.
//
// "apply rotation" = modify a frame from being "pending" to being "not
// pending" rotation (a no-op for "unrotated").
//
VideoRotation rotation() const { return rotation_; }
void set_rotation(VideoRotation rotation) { rotation_ = rotation; }

// Get color space when available.
const absl::optional<ColorSpace>& color_space() const { return color_space_; }
void set_color_space(const absl::optional<ColorSpace>& color_space) {
  color_space_ = color_space;
}

// Parameters that affect how the frame should be rendered. Returned by copy.
RenderParameters render_parameters() const { return render_parameters_; }
void set_render_parameters(const RenderParameters& render_parameters) {
  render_parameters_ = render_parameters;
}
|
||||
|
||||
// Deprecated in favor of render_parameters, will be removed once Chromium is
// updated. max_composition_delay_in_frames() is used in an experiment of a
// low-latency renderer algorithm see crbug.com/1138888.
[[deprecated("Use render_parameters() instead.")]] absl::optional<int32_t>
max_composition_delay_in_frames() const {
  return render_parameters_.max_composition_delay_in_frames;
}

// Get render time in milliseconds.
int64_t render_time_ms() const;

// Return the underlying buffer. Never nullptr for a properly
// initialized VideoFrame.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer() const;

void set_video_frame_buffer(
    const rtc::scoped_refptr<VideoFrameBuffer>& buffer);

// Return true if the frame is stored in a texture.
bool is_texture() const {
  return video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative;
}

// True when an explicit update rect has been set (builder or setter).
bool has_update_rect() const { return update_rect_.has_value(); }

// Returns update_rect set by the builder or set_update_rect() or whole frame
// rect if no update rect is available.
UpdateRect update_rect() const {
  return update_rect_.value_or(UpdateRect{0, 0, width(), height()});
}

// Rectangle must be within the frame dimensions.
void set_update_rect(const VideoFrame::UpdateRect& update_rect) {
  RTC_DCHECK_GE(update_rect.offset_x, 0);
  RTC_DCHECK_GE(update_rect.offset_y, 0);
  RTC_DCHECK_LE(update_rect.offset_x + update_rect.width, width());
  RTC_DCHECK_LE(update_rect.offset_y + update_rect.height, height());
  update_rect_ = update_rect;
}

// Resets to "no update information": update_rect() reverts to the full frame.
void clear_update_rect() { update_rect_ = absl::nullopt; }

// Get information about packets used to assemble this video frame. Might be
// empty if the information isn't available.
const RtpPacketInfos& packet_infos() const { return packet_infos_; }
void set_packet_infos(RtpPacketInfos value) {
  packet_infos_ = std::move(value);
}

// Processing (decode) timestamps, when available; unset for locally
// captured frames. Returned as a copy of the optional wrapper.
const absl::optional<ProcessingTime> processing_time() const {
  return processing_time_;
}
void set_processing_time(const ProcessingTime& processing_time) {
  processing_time_ = processing_time;
}
|
||||
|
||||
private:
|
||||
VideoFrame(uint16_t id,
|
||||
const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
|
||||
int64_t timestamp_us,
|
||||
const absl::optional<Timestamp>& capture_time_identifier,
|
||||
const absl::optional<Timestamp>& reference_time,
|
||||
uint32_t timestamp_rtp,
|
||||
int64_t ntp_time_ms,
|
||||
VideoRotation rotation,
|
||||
const absl::optional<ColorSpace>& color_space,
|
||||
const RenderParameters& render_parameters,
|
||||
const absl::optional<UpdateRect>& update_rect,
|
||||
RtpPacketInfos packet_infos);
|
||||
|
||||
uint16_t id_;
|
||||
// An opaque reference counted handle that stores the pixel data.
|
||||
rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
|
||||
uint32_t timestamp_rtp_;
|
||||
int64_t ntp_time_ms_;
|
||||
int64_t timestamp_us_;
|
||||
absl::optional<Timestamp> capture_time_identifier_;
|
||||
// Contains a monotonically increasing clock time and represents the time
|
||||
// when the frame was captured. Not all platforms provide the "true" sample
|
||||
// capture time in |reference_time| but might instead use a somewhat delayed
|
||||
// (by the time it took to capture the frame) version of it.
|
||||
absl::optional<Timestamp> reference_time_;
|
||||
VideoRotation rotation_;
|
||||
absl::optional<ColorSpace> color_space_;
|
||||
// Contains parameters that affect how the frame should be rendered.
|
||||
RenderParameters render_parameters_;
|
||||
// Area updated since the last frame. If present, it means that the bounding
// box of all the changes is within the rectangular area and is close to it.
// If absent, it means that there's no information about the change at all and
// update_rect() will return a rectangle corresponding to the entire frame.
|
||||
absl::optional<UpdateRect> update_rect_;
|
||||
// Information about packets used to assemble this video frame. This is needed
|
||||
// by `SourceTracker` when the frame is delivered to the RTCRtpReceiver's
|
||||
// MediaStreamTrack, in order to implement getContributingSources(). See:
|
||||
// https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources
|
||||
RtpPacketInfos packet_infos_;
|
||||
// Processing timestamps of the frame. For received video frames these are the
|
||||
// timestamps when the frame is sent to the decoder and the decoded image
|
||||
// returned from the decoder.
|
||||
// Currently, not set for locally captured video frames.
|
||||
absl::optional<ProcessingTime> processing_time_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_FRAME_H_
|
||||
242
TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc
Normal file
242
TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc
Normal file
|
|
@ -0,0 +1,242 @@
|
|||
/*
|
||||
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_frame_buffer.h"
|
||||
|
||||
#include "api/video/i420_buffer.h"
|
||||
#include "api/video/i422_buffer.h"
|
||||
#include "api/video/i444_buffer.h"
|
||||
#include "api/video/nv12_buffer.h"
|
||||
#include "rtc_base/checks.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Default crop-and-scale: converts this buffer to I420 and performs the
// crop/scale in that format. Subclasses may override with a more efficient
// format-specific implementation (see I422/I444/NV12 below).
rtc::scoped_refptr<VideoFrameBuffer> VideoFrameBuffer::CropAndScale(
    int offset_x,
    int offset_y,
    int crop_width,
    int crop_height,
    int scaled_width,
    int scaled_height) {
  // NOTE(review): ToI420() may return nullptr on conversion failure, which
  // would crash when dereferenced here -- confirm callers guarantee success.
  rtc::scoped_refptr<I420Buffer> scaled =
      I420Buffer::Create(scaled_width, scaled_height);
  scaled->CropAndScaleFrom(*ToI420(), offset_x, offset_y, crop_width,
                           crop_height);
  return scaled;
}
|
||||
|
||||
// Base implementation: returns nullptr, meaning "no zero-cost I420 view".
const I420BufferInterface* VideoFrameBuffer::GetI420() const {
  // Overridden by subclasses that can return an I420 buffer without any
  // conversion, in particular, I420BufferInterface.
  return nullptr;
}
|
||||
|
||||
// Checked downcast accessors: each RTC_CHECKs that the buffer's runtime
// type() matches before casting, so calling the wrong accessor crashes
// rather than returning a misinterpreted pointer.
const I420ABufferInterface* VideoFrameBuffer::GetI420A() const {
  RTC_CHECK(type() == Type::kI420A);
  return static_cast<const I420ABufferInterface*>(this);
}

const I444BufferInterface* VideoFrameBuffer::GetI444() const {
  RTC_CHECK(type() == Type::kI444);
  return static_cast<const I444BufferInterface*>(this);
}

const I422BufferInterface* VideoFrameBuffer::GetI422() const {
  RTC_CHECK(type() == Type::kI422);
  return static_cast<const I422BufferInterface*>(this);
}

const I010BufferInterface* VideoFrameBuffer::GetI010() const {
  RTC_CHECK(type() == Type::kI010);
  return static_cast<const I010BufferInterface*>(this);
}

const I210BufferInterface* VideoFrameBuffer::GetI210() const {
  RTC_CHECK(type() == Type::kI210);
  return static_cast<const I210BufferInterface*>(this);
}

const I410BufferInterface* VideoFrameBuffer::GetI410() const {
  RTC_CHECK(type() == Type::kI410);
  return static_cast<const I410BufferInterface*>(this);
}

const NV12BufferInterface* VideoFrameBuffer::GetNV12() const {
  RTC_CHECK(type() == Type::kNV12);
  return static_cast<const NV12BufferInterface*>(this);
}

// Base implementation of the kNative mapping hook: only callable on kNative
// buffers (enforced by the RTC_CHECK); returns nullptr meaning "mapping not
// implemented". Native-buffer subclasses may override. `types` is unused
// here by design.
rtc::scoped_refptr<VideoFrameBuffer> VideoFrameBuffer::GetMappedFrameBuffer(
    rtc::ArrayView<Type> types) {
  RTC_CHECK(type() == Type::kNative);
  return nullptr;
}
|
||||
|
||||
// I420 buffers always report the kI420 pixel-format tag.
VideoFrameBuffer::Type I420BufferInterface::type() const {
  return Type::kI420;
}
|
||||
|
||||
// Maps a VideoFrameBuffer::Type to its human-readable name (e.g. for
// logging). Must be kept in sync with the VideoFrameBuffer::Type enum;
// omitting the `default:` case lets -Wswitch flag any newly added value.
const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type) {
  switch (type) {
    case VideoFrameBuffer::Type::kNative:
      return "kNative";
    case VideoFrameBuffer::Type::kI420:
      return "kI420";
    case VideoFrameBuffer::Type::kI420A:
      return "kI420A";
    case VideoFrameBuffer::Type::kI444:
      return "kI444";
    case VideoFrameBuffer::Type::kI422:
      return "kI422";
    case VideoFrameBuffer::Type::kI010:
      return "kI010";
    case VideoFrameBuffer::Type::kI210:
      return "kI210";
    case VideoFrameBuffer::Type::kI410:
      return "kI410";
    case VideoFrameBuffer::Type::kNV12:
      return "kNV12";
  }
  // The original fell off the end of this value-returning function for
  // out-of-range enum values, which is undefined behavior in release builds
  // where RTC_DCHECK_NOTREACHED() is a no-op. Return a sentinel instead.
  RTC_DCHECK_NOTREACHED();
  return "";
}
|
||||
|
||||
// 4:2:0 subsampling: chroma planes are half resolution in both dimensions;
// the +1 rounds up for odd luma dimensions.
int I420BufferInterface::ChromaWidth() const {
  return (width() + 1) / 2;
}

int I420BufferInterface::ChromaHeight() const {
  return (height() + 1) / 2;
}

// Already I420: return this buffer itself, no conversion (refcount bump only).
rtc::scoped_refptr<I420BufferInterface> I420BufferInterface::ToI420() {
  return rtc::scoped_refptr<I420BufferInterface>(this);
}

// Zero-cost I420 view of an I420 buffer is the buffer itself.
const I420BufferInterface* I420BufferInterface::GetI420() const {
  return this;
}
|
||||
|
||||
VideoFrameBuffer::Type I420ABufferInterface::type() const {
  return Type::kI420A;
}

VideoFrameBuffer::Type I444BufferInterface::type() const {
  return Type::kI444;
}

// 4:4:4 -- chroma planes have full luma resolution.
int I444BufferInterface::ChromaWidth() const {
  return width();
}

int I444BufferInterface::ChromaHeight() const {
  return height();
}

// Format-specific crop/scale that stays in I444 instead of converting to I420.
rtc::scoped_refptr<VideoFrameBuffer> I444BufferInterface::CropAndScale(
    int offset_x,
    int offset_y,
    int crop_width,
    int crop_height,
    int scaled_width,
    int scaled_height) {
  rtc::scoped_refptr<I444Buffer> result =
      I444Buffer::Create(scaled_width, scaled_height);
  result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height);
  return result;
}
|
||||
|
||||
VideoFrameBuffer::Type I422BufferInterface::type() const {
  return Type::kI422;
}

// 4:2:2 subsampling: chroma is half width (rounded up), full height.
int I422BufferInterface::ChromaWidth() const {
  return (width() + 1) / 2;
}

int I422BufferInterface::ChromaHeight() const {
  return height();
}

// Format-specific crop/scale that stays in I422 instead of converting to I420.
rtc::scoped_refptr<VideoFrameBuffer> I422BufferInterface::CropAndScale(
    int offset_x,
    int offset_y,
    int crop_width,
    int crop_height,
    int scaled_width,
    int scaled_height) {
  rtc::scoped_refptr<I422Buffer> result =
      I422Buffer::Create(scaled_width, scaled_height);
  result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height);
  return result;
}
|
||||
|
||||
VideoFrameBuffer::Type I010BufferInterface::type() const {
  return Type::kI010;
}

// 4:2:0 subsampling: chroma is half resolution in both dimensions.
int I010BufferInterface::ChromaWidth() const {
  return (width() + 1) / 2;
}

int I010BufferInterface::ChromaHeight() const {
  return (height() + 1) / 2;
}

VideoFrameBuffer::Type I210BufferInterface::type() const {
  return Type::kI210;
}

// 4:2:2 subsampling: chroma is half width, full height.
int I210BufferInterface::ChromaWidth() const {
  return (width() + 1) / 2;
}

int I210BufferInterface::ChromaHeight() const {
  return height();
}

VideoFrameBuffer::Type I410BufferInterface::type() const {
  return Type::kI410;
}

// 4:4:4 -- chroma planes have full luma resolution.
int I410BufferInterface::ChromaWidth() const {
  return width();
}

int I410BufferInterface::ChromaHeight() const {
  return height();
}
|
||||
|
||||
VideoFrameBuffer::Type NV12BufferInterface::type() const {
  return Type::kNV12;
}

// NV12 chroma is subsampled 2x in both dimensions (rounded up).
int NV12BufferInterface::ChromaWidth() const {
  return (width() + 1) / 2;
}

int NV12BufferInterface::ChromaHeight() const {
  return (height() + 1) / 2;
}

// Format-specific crop/scale that stays in NV12 instead of converting to I420.
rtc::scoped_refptr<VideoFrameBuffer> NV12BufferInterface::CropAndScale(
    int offset_x,
    int offset_y,
    int crop_width,
    int crop_height,
    int scaled_width,
    int scaled_height) {
  rtc::scoped_refptr<NV12Buffer> result =
      NV12Buffer::Create(scaled_width, scaled_height);
  result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height);
  return result;
}
|
||||
|
||||
} // namespace webrtc
|
||||
325
TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h
Normal file
325
TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h
Normal file
|
|
@ -0,0 +1,325 @@
|
|||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_FRAME_BUFFER_H_
|
||||
#define API_VIDEO_VIDEO_FRAME_BUFFER_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include "api/array_view.h"
|
||||
#include "api/ref_count.h"
|
||||
#include "api/scoped_refptr.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class I420BufferInterface;
|
||||
class I420ABufferInterface;
|
||||
class I422BufferInterface;
|
||||
class I444BufferInterface;
|
||||
class I010BufferInterface;
|
||||
class I210BufferInterface;
|
||||
class I410BufferInterface;
|
||||
class NV12BufferInterface;
|
||||
|
||||
// Base class for frame buffers of different types of pixel format and storage.
|
||||
// The tag in type() indicates how the data is represented, and each type is
|
||||
// implemented as a subclass. To access the pixel data, call the appropriate
|
||||
// GetXXX() function, where XXX represents the type. There is also a function
|
||||
// ToI420() that returns a frame buffer in I420 format, converting from the
|
||||
// underlying representation if necessary. I420 is the most widely accepted
|
||||
// format and serves as a fallback for video sinks that can only handle I420,
|
||||
// e.g. the internal WebRTC software encoders. A special enum value 'kNative' is
|
||||
// provided for external clients to implement their own frame buffer
|
||||
// representations, e.g. as textures. The external client can produce such
|
||||
// native frame buffers from custom video sources, and then cast it back to the
|
||||
// correct subclass in custom video sinks. The purpose of this is to improve
|
||||
// performance by providing an optimized path without intermediate conversions.
|
||||
// Frame metadata such as rotation and timestamp are stored in
|
||||
// webrtc::VideoFrame, and not here.
|
||||
class RTC_EXPORT VideoFrameBuffer : public webrtc::RefCountInterface {
 public:
  // New frame buffer types will be added conservatively when there is an
  // opportunity to optimize the path between some pair of video source and
  // video sink.
  // GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
  // GENERATED_JAVA_CLASS_NAME_OVERRIDE: VideoFrameBufferType
  enum class Type {
    kNative,
    kI420,
    kI420A,
    kI422,
    kI444,
    kI010,
    kI210,
    kI410,
    kNV12,
  };

  // This function specifies in what pixel format the data is stored in.
  virtual Type type() const = 0;

  // The resolution of the frame in pixels. For formats where some planes are
  // subsampled, this is the highest-resolution plane.
  virtual int width() const = 0;
  virtual int height() const = 0;

  // Returns a memory-backed frame buffer in I420 format. If the pixel data is
  // in another format, a conversion will take place. All implementations must
  // provide a fallback to I420 for compatibility with e.g. the internal WebRTC
  // software encoders.
  // Conversion may fail, for example if reading the pixel data from a texture
  // fails. If the conversion fails, nullptr is returned.
  virtual rtc::scoped_refptr<I420BufferInterface> ToI420() = 0;

  // GetI420() methods should return I420 buffer if conversion is trivial, i.e
  // no change for binary data is needed. Otherwise these methods should return
  // nullptr. One example of buffer with that property is
  // WebrtcVideoFrameAdapter in Chrome - it's I420 buffer backed by a shared
  // memory buffer. Therefore it must have type kNative. Yet, ToI420()
  // doesn't affect binary data at all. Another example is any I420A buffer.
  // TODO(https://crbug.com/webrtc/12021): Make this method non-virtual and
  // behave as the other GetXXX methods below.
  virtual const I420BufferInterface* GetI420() const;

  // A format specific scale function. Default implementation works by
  // converting to I420. But more efficient implementations may override it,
  // especially for kNative.
  // First, the image is cropped to `crop_width` and `crop_height` and then
  // scaled to `scaled_width` and `scaled_height`.
  virtual rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
                                                            int offset_y,
                                                            int crop_width,
                                                            int crop_height,
                                                            int scaled_width,
                                                            int scaled_height);

  // Alias for common use case: scale the full frame (no cropping).
  rtc::scoped_refptr<VideoFrameBuffer> Scale(int scaled_width,
                                             int scaled_height) {
    return CropAndScale(0, 0, width(), height(), scaled_width, scaled_height);
  }

  // These functions should only be called if type() is of the correct type.
  // Calling with a different type will result in a crash.
  const I420ABufferInterface* GetI420A() const;
  const I422BufferInterface* GetI422() const;
  const I444BufferInterface* GetI444() const;
  const I010BufferInterface* GetI010() const;
  const I210BufferInterface* GetI210() const;
  const I410BufferInterface* GetI410() const;
  const NV12BufferInterface* GetNV12() const;

  // From a kNative frame, returns a VideoFrameBuffer with a pixel format in
  // the list of types that is in the main memory with a pixel perfect
  // conversion for encoding with a software encoder. Returns nullptr if the
  // frame type is not supported, mapping is not possible, or if the kNative
  // frame has not implemented this method. Only callable if type() is kNative.
  virtual rtc::scoped_refptr<VideoFrameBuffer> GetMappedFrameBuffer(
      rtc::ArrayView<Type> types);

 protected:
  // Non-public: lifetime is managed through RefCountInterface.
  ~VideoFrameBuffer() override {}
};
|
||||
|
||||
// Update when VideoFrameBuffer::Type is updated.
|
||||
const char* VideoFrameBufferTypeToString(VideoFrameBuffer::Type type);
|
||||
|
||||
// This interface represents planar formats.
class PlanarYuvBuffer : public VideoFrameBuffer {
 public:
  // Dimensions of the chroma planes (may be subsampled relative to the luma).
  virtual int ChromaWidth() const = 0;
  virtual int ChromaHeight() const = 0;

  // Returns the number of steps(in terms of Data*() return type) between
  // successive rows for a given plane.
  virtual int StrideY() const = 0;
  virtual int StrideU() const = 0;
  virtual int StrideV() const = 0;

 protected:
  ~PlanarYuvBuffer() override {}
};

// This interface represents 8-bit color depth formats: Type::kI420,
// Type::kI420A, Type::kI422 and Type::kI444.
class PlanarYuv8Buffer : public PlanarYuvBuffer {
 public:
  // Returns pointer to the pixel data for a given plane. The memory is owned by
  // the VideoFrameBuffer object and must not be freed by the caller.
  virtual const uint8_t* DataY() const = 0;
  virtual const uint8_t* DataU() const = 0;
  virtual const uint8_t* DataV() const = 0;

 protected:
  ~PlanarYuv8Buffer() override {}
};
|
||||
|
||||
// Read-only interface for Type::kI420 buffers (4:2:0, 8-bit planar).
class RTC_EXPORT I420BufferInterface : public PlanarYuv8Buffer {
 public:
  Type type() const override;

  int ChromaWidth() const final;
  int ChromaHeight() const final;

  // An I420 buffer converts to itself; see the .cc for the trivial overrides.
  rtc::scoped_refptr<I420BufferInterface> ToI420() final;
  const I420BufferInterface* GetI420() const final;

 protected:
  ~I420BufferInterface() override {}
};

// I420 with an additional 8-bit alpha plane (Type::kI420A).
class RTC_EXPORT I420ABufferInterface : public I420BufferInterface {
 public:
  Type type() const final;
  // Alpha plane data and its row stride.
  virtual const uint8_t* DataA() const = 0;
  virtual int StrideA() const = 0;

 protected:
  ~I420ABufferInterface() override {}
};
|
||||
|
||||
// Represents Type::kI422, 4:2:2 planar with 8 bits per pixel.
class I422BufferInterface : public PlanarYuv8Buffer {
 public:
  Type type() const final;

  int ChromaWidth() const final;
  int ChromaHeight() const final;

  // I422-specific crop/scale that avoids an intermediate I420 conversion.
  rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
                                                    int offset_y,
                                                    int crop_width,
                                                    int crop_height,
                                                    int scaled_width,
                                                    int scaled_height) override;

 protected:
  ~I422BufferInterface() override {}
};

// Represents Type::kI444, 4:4:4 planar with 8 bits per pixel.
class I444BufferInterface : public PlanarYuv8Buffer {
 public:
  Type type() const final;

  int ChromaWidth() const final;
  int ChromaHeight() const final;

  // I444-specific crop/scale that avoids an intermediate I420 conversion.
  rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
                                                    int offset_y,
                                                    int crop_width,
                                                    int crop_height,
                                                    int scaled_width,
                                                    int scaled_height) override;

 protected:
  ~I444BufferInterface() override {}
};
|
||||
|
||||
// This interface represents 8-bit to 16-bit color depth formats: Type::kI010 or
// Type::kI210 .
class PlanarYuv16BBuffer : public PlanarYuvBuffer {
 public:
  // Returns pointer to the pixel data for a given plane. The memory is owned by
  // the VideoFrameBuffer object and must not be freed by the caller.
  virtual const uint16_t* DataY() const = 0;
  virtual const uint16_t* DataU() const = 0;
  virtual const uint16_t* DataV() const = 0;

 protected:
  ~PlanarYuv16BBuffer() override {}
};

// Represents Type::kI010, allocates 16 bits per pixel and fills 10 least
// significant bits with color information.
class I010BufferInterface : public PlanarYuv16BBuffer {
 public:
  Type type() const override;

  int ChromaWidth() const final;
  int ChromaHeight() const final;

 protected:
  ~I010BufferInterface() override {}
};

// Represents Type::kI210, allocates 16 bits per pixel and fills 10 least
// significant bits with color information.
class I210BufferInterface : public PlanarYuv16BBuffer {
 public:
  Type type() const override;

  int ChromaWidth() const final;
  int ChromaHeight() const final;

 protected:
  ~I210BufferInterface() override {}
};

// Represents Type::kI410, allocates 16 bits per pixel and fills 10 least
// significant bits with color information.
class I410BufferInterface : public PlanarYuv16BBuffer {
 public:
  Type type() const override;

  int ChromaWidth() const final;
  int ChromaHeight() const final;

 protected:
  ~I410BufferInterface() override {}
};
|
||||
|
||||
// Base for biplanar (two-plane) YUV formats, e.g. NV12: a Y plane plus a
// single combined chroma plane.
class BiplanarYuvBuffer : public VideoFrameBuffer {
 public:
  virtual int ChromaWidth() const = 0;
  virtual int ChromaHeight() const = 0;

  // Returns the number of steps(in terms of Data*() return type) between
  // successive rows for a given plane.
  virtual int StrideY() const = 0;
  virtual int StrideUV() const = 0;

 protected:
  ~BiplanarYuvBuffer() override {}
};

// 8-bit biplanar buffer: separate accessors for the Y and combined UV planes.
class BiplanarYuv8Buffer : public BiplanarYuvBuffer {
 public:
  virtual const uint8_t* DataY() const = 0;
  virtual const uint8_t* DataUV() const = 0;

 protected:
  ~BiplanarYuv8Buffer() override {}
};
|
||||
|
||||
// Represents Type::kNV12. NV12 is full resolution Y and half-resolution
// interleaved UV.
class RTC_EXPORT NV12BufferInterface : public BiplanarYuv8Buffer {
 public:
  Type type() const override;

  int ChromaWidth() const final;
  int ChromaHeight() const final;

  // NV12-specific crop/scale that avoids an intermediate I420 conversion.
  rtc::scoped_refptr<VideoFrameBuffer> CropAndScale(int offset_x,
                                                    int offset_y,
                                                    int crop_width,
                                                    int crop_height,
                                                    int scaled_width,
                                                    int scaled_height) override;

 protected:
  ~NV12BufferInterface() override {}
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_
|
||||
174
TMessagesProj/jni/voip/webrtc/api/video/video_frame_metadata.cc
Normal file
174
TMessagesProj/jni/voip/webrtc/api/video/video_frame_metadata.cc
Normal file
|
|
@ -0,0 +1,174 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_frame_metadata.h"
|
||||
|
||||
#include <utility>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
VideoFrameMetadata::VideoFrameMetadata() = default;

// Plain getters/setters for per-frame metadata; no validation is performed.
VideoFrameType VideoFrameMetadata::GetFrameType() const {
  return frame_type_;
}

void VideoFrameMetadata::SetFrameType(VideoFrameType frame_type) {
  frame_type_ = frame_type;
}

uint16_t VideoFrameMetadata::GetWidth() const {
  return width_;
}

void VideoFrameMetadata::SetWidth(uint16_t width) {
  width_ = width;
}

uint16_t VideoFrameMetadata::GetHeight() const {
  return height_;
}

void VideoFrameMetadata::SetHeight(uint16_t height) {
  height_ = height;
}

VideoRotation VideoFrameMetadata::GetRotation() const {
  return rotation_;
}

void VideoFrameMetadata::SetRotation(VideoRotation rotation) {
  rotation_ = rotation;
}

VideoContentType VideoFrameMetadata::GetContentType() const {
  return content_type_;
}

void VideoFrameMetadata::SetContentType(VideoContentType content_type) {
  content_type_ = content_type;
}

// Frame id is optional; absl::nullopt means "not set".
absl::optional<int64_t> VideoFrameMetadata::GetFrameId() const {
  return frame_id_;
}

void VideoFrameMetadata::SetFrameId(absl::optional<int64_t> frame_id) {
  frame_id_ = frame_id;
}

int VideoFrameMetadata::GetSpatialIndex() const {
  return spatial_index_;
}

void VideoFrameMetadata::SetSpatialIndex(int spatial_index) {
  spatial_index_ = spatial_index;
}

int VideoFrameMetadata::GetTemporalIndex() const {
  return temporal_index_;
}

void VideoFrameMetadata::SetTemporalIndex(int temporal_index) {
  temporal_index_ = temporal_index;
}
|
||||
|
||||
// Returns a view into the internally stored dependency list; the view is
// only valid while this object is alive and unmodified.
rtc::ArrayView<const int64_t> VideoFrameMetadata::GetFrameDependencies() const {
  return frame_dependencies_;
}

// Copies the given dependencies into internal storage.
void VideoFrameMetadata::SetFrameDependencies(
    rtc::ArrayView<const int64_t> frame_dependencies) {
  frame_dependencies_.assign(frame_dependencies.begin(),
                             frame_dependencies.end());
}

// View into internal storage; same lifetime caveat as GetFrameDependencies().
rtc::ArrayView<const DecodeTargetIndication>
VideoFrameMetadata::GetDecodeTargetIndications() const {
  return decode_target_indications_;
}

void VideoFrameMetadata::SetDecodeTargetIndications(
    rtc::ArrayView<const DecodeTargetIndication> decode_target_indications) {
  decode_target_indications_.assign(decode_target_indications.begin(),
                                    decode_target_indications.end());
}

bool VideoFrameMetadata::GetIsLastFrameInPicture() const {
  return is_last_frame_in_picture_;
}

void VideoFrameMetadata::SetIsLastFrameInPicture(
    bool is_last_frame_in_picture) {
  is_last_frame_in_picture_ = is_last_frame_in_picture;
}

uint8_t VideoFrameMetadata::GetSimulcastIdx() const {
  return simulcast_idx_;
}

void VideoFrameMetadata::SetSimulcastIdx(uint8_t simulcast_idx) {
  simulcast_idx_ = simulcast_idx;
}

VideoCodecType VideoFrameMetadata::GetCodec() const {
  return codec_;
}

void VideoFrameMetadata::SetCodec(VideoCodecType codec) {
  codec_ = codec;
}

// Codec-specific RTP header data; stored by value (moved in on set).
const RTPVideoHeaderCodecSpecifics&
VideoFrameMetadata::GetRTPVideoHeaderCodecSpecifics() const {
  return codec_specifics_;
}

void VideoFrameMetadata::SetRTPVideoHeaderCodecSpecifics(
    RTPVideoHeaderCodecSpecifics codec_specifics) {
  codec_specifics_ = std::move(codec_specifics);
}

uint32_t VideoFrameMetadata::GetSsrc() const {
  return ssrc_;
}

void VideoFrameMetadata::SetSsrc(uint32_t ssrc) {
  ssrc_ = ssrc;
}

// Returns a copy of the CSRC list.
std::vector<uint32_t> VideoFrameMetadata::GetCsrcs() const {
  return csrcs_;
}

void VideoFrameMetadata::SetCsrcs(std::vector<uint32_t> csrcs) {
  csrcs_ = std::move(csrcs);
}
|
||||
|
||||
// Field-by-field equality over every member of VideoFrameMetadata.
// Written as guard clauses; the result is identical to one chained
// short-circuiting && expression over the same comparisons.
bool operator==(const VideoFrameMetadata& lhs, const VideoFrameMetadata& rhs) {
  if (lhs.frame_type_ != rhs.frame_type_)
    return false;
  if (lhs.width_ != rhs.width_ || lhs.height_ != rhs.height_)
    return false;
  if (lhs.rotation_ != rhs.rotation_)
    return false;
  if (lhs.content_type_ != rhs.content_type_)
    return false;
  if (lhs.frame_id_ != rhs.frame_id_)
    return false;
  if (lhs.spatial_index_ != rhs.spatial_index_)
    return false;
  if (lhs.temporal_index_ != rhs.temporal_index_)
    return false;
  if (lhs.frame_dependencies_ != rhs.frame_dependencies_)
    return false;
  if (lhs.decode_target_indications_ != rhs.decode_target_indications_)
    return false;
  if (lhs.is_last_frame_in_picture_ != rhs.is_last_frame_in_picture_)
    return false;
  if (lhs.simulcast_idx_ != rhs.simulcast_idx_)
    return false;
  if (lhs.codec_ != rhs.codec_)
    return false;
  if (lhs.codec_specifics_ != rhs.codec_specifics_)
    return false;
  if (lhs.ssrc_ != rhs.ssrc_)
    return false;
  return lhs.csrcs_ == rhs.csrcs_;
}
|
||||
|
||||
// Defined in terms of operator== so the two can never disagree.
bool operator!=(const VideoFrameMetadata& lhs, const VideoFrameMetadata& rhs) {
  return !(lhs == rhs);
}
|
||||
|
||||
} // namespace webrtc
|
||||
127
TMessagesProj/jni/voip/webrtc/api/video/video_frame_metadata.h
Normal file
127
TMessagesProj/jni/voip/webrtc/api/video/video_frame_metadata.h
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
/*
|
||||
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_
|
||||
#define API_VIDEO_VIDEO_FRAME_METADATA_H_
|
||||
|
||||
#include <cstdint>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/container/inlined_vector.h"
|
||||
#include "absl/types/optional.h"
|
||||
#include "absl/types/variant.h"
|
||||
#include "api/array_view.h"
|
||||
#include "api/transport/rtp/dependency_descriptor.h"
|
||||
#include "api/video/video_codec_type.h"
|
||||
#include "api/video/video_content_type.h"
|
||||
#include "api/video/video_frame_type.h"
|
||||
#include "api/video/video_rotation.h"
|
||||
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
|
||||
#include "modules/video_coding/codecs/vp8/include/vp8_globals.h"
|
||||
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
using RTPVideoHeaderCodecSpecifics = absl::variant<absl::monostate,
|
||||
RTPVideoHeaderVP8,
|
||||
RTPVideoHeaderVP9,
|
||||
RTPVideoHeaderH264>;
|
||||
|
||||
// A subset of metadata from the RTP video header, exposed in insertable streams
|
||||
// API.
|
||||
class RTC_EXPORT VideoFrameMetadata {
|
||||
public:
|
||||
VideoFrameMetadata();
|
||||
VideoFrameMetadata(const VideoFrameMetadata&) = default;
|
||||
VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default;
|
||||
|
||||
VideoFrameType GetFrameType() const;
|
||||
void SetFrameType(VideoFrameType frame_type);
|
||||
|
||||
uint16_t GetWidth() const;
|
||||
void SetWidth(uint16_t width);
|
||||
|
||||
uint16_t GetHeight() const;
|
||||
void SetHeight(uint16_t height);
|
||||
|
||||
VideoRotation GetRotation() const;
|
||||
void SetRotation(VideoRotation rotation);
|
||||
|
||||
VideoContentType GetContentType() const;
|
||||
void SetContentType(VideoContentType content_type);
|
||||
|
||||
absl::optional<int64_t> GetFrameId() const;
|
||||
void SetFrameId(absl::optional<int64_t> frame_id);
|
||||
|
||||
int GetSpatialIndex() const;
|
||||
void SetSpatialIndex(int spatial_index);
|
||||
|
||||
int GetTemporalIndex() const;
|
||||
void SetTemporalIndex(int temporal_index);
|
||||
|
||||
rtc::ArrayView<const int64_t> GetFrameDependencies() const;
|
||||
void SetFrameDependencies(rtc::ArrayView<const int64_t> frame_dependencies);
|
||||
|
||||
rtc::ArrayView<const DecodeTargetIndication> GetDecodeTargetIndications()
|
||||
const;
|
||||
void SetDecodeTargetIndications(
|
||||
rtc::ArrayView<const DecodeTargetIndication> decode_target_indications);
|
||||
|
||||
bool GetIsLastFrameInPicture() const;
|
||||
void SetIsLastFrameInPicture(bool is_last_frame_in_picture);
|
||||
|
||||
uint8_t GetSimulcastIdx() const;
|
||||
void SetSimulcastIdx(uint8_t simulcast_idx);
|
||||
|
||||
VideoCodecType GetCodec() const;
|
||||
void SetCodec(VideoCodecType codec);
|
||||
|
||||
// Which varient is used depends on the VideoCodecType from GetCodecs().
|
||||
const RTPVideoHeaderCodecSpecifics& GetRTPVideoHeaderCodecSpecifics() const;
|
||||
void SetRTPVideoHeaderCodecSpecifics(
|
||||
RTPVideoHeaderCodecSpecifics codec_specifics);
|
||||
|
||||
uint32_t GetSsrc() const;
|
||||
void SetSsrc(uint32_t ssrc);
|
||||
std::vector<uint32_t> GetCsrcs() const;
|
||||
void SetCsrcs(std::vector<uint32_t> csrcs);
|
||||
|
||||
friend bool operator==(const VideoFrameMetadata& lhs,
|
||||
const VideoFrameMetadata& rhs);
|
||||
friend bool operator!=(const VideoFrameMetadata& lhs,
|
||||
const VideoFrameMetadata& rhs);
|
||||
|
||||
private:
|
||||
VideoFrameType frame_type_ = VideoFrameType::kEmptyFrame;
|
||||
int16_t width_ = 0;
|
||||
int16_t height_ = 0;
|
||||
VideoRotation rotation_ = VideoRotation::kVideoRotation_0;
|
||||
VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
|
||||
|
||||
// Corresponding to GenericDescriptorInfo.
|
||||
absl::optional<int64_t> frame_id_;
|
||||
int spatial_index_ = 0;
|
||||
int temporal_index_ = 0;
|
||||
absl::InlinedVector<int64_t, 5> frame_dependencies_;
|
||||
absl::InlinedVector<DecodeTargetIndication, 10> decode_target_indications_;
|
||||
|
||||
bool is_last_frame_in_picture_ = true;
|
||||
uint8_t simulcast_idx_ = 0;
|
||||
VideoCodecType codec_ = VideoCodecType::kVideoCodecGeneric;
|
||||
RTPVideoHeaderCodecSpecifics codec_specifics_;
|
||||
|
||||
// RTP info.
|
||||
uint32_t ssrc_ = 0u;
|
||||
std::vector<uint32_t> csrcs_;
|
||||
};
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_
|
||||
|
|
@ -0,0 +1,123 @@
|
|||
/*
|
||||
* Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_frame_metadata.h"
|
||||
|
||||
#include "api/video/video_frame.h"
|
||||
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
|
||||
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
|
||||
#include "test/gtest.h"
|
||||
#include "video/video_receive_stream2.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
|
||||
// Builds an H.264 RTP video header fixture with one NALU and distinct,
// arbitrary values in every populated field.
RTPVideoHeaderH264 ExampleHeaderH264() {
  NaluInfo nalu;
  nalu.type = 1;
  nalu.sps_id = 2;
  nalu.pps_id = 3;

  RTPVideoHeaderH264 header;
  header.packetization_type = H264PacketizationTypes::kH264StapA;
  header.packetization_mode = H264PacketizationMode::SingleNalUnit;
  header.nalu_type = 4;
  header.nalus_length = 1;
  header.nalus[0] = nalu;
  return header;
}
|
||||
|
||||
// Builds a VP9 RTP video header fixture: starts from the defaulted state
// (InitRTPVideoHeaderVP9) and then sets every optional field to a distinct,
// arbitrary value so equality tests exercise all of them.
RTPVideoHeaderVP9 ExampleHeaderVP9() {
  RTPVideoHeaderVP9 header;
  header.InitRTPVideoHeaderVP9();
  header.inter_pic_predicted = true;
  header.flexible_mode = true;
  header.beginning_of_frame = true;
  header.end_of_frame = true;
  header.ss_data_available = true;
  header.non_ref_for_inter_layer_pred = true;
  header.picture_id = 1;
  header.max_picture_id = 2;
  header.tl0_pic_idx = 3;
  header.temporal_idx = 4;
  header.spatial_idx = 5;
  header.temporal_up_switch = true;
  header.inter_layer_predicted = true;
  header.gof_idx = 6;
  header.num_ref_pics = 1;
  header.pid_diff[0] = 8;
  header.ref_picture_id[0] = 9;
  header.num_spatial_layers = 1;
  header.first_active_layer = 0;
  header.spatial_layer_resolution_present = true;
  header.width[0] = 12;
  header.height[0] = 13;
  header.end_of_picture = true;
  // Group-of-frames structure plus a non-default starting picture id.
  header.gof.SetGofInfoVP9(TemporalStructureMode::kTemporalStructureMode1);
  header.gof.pid_start = 14;
  return header;
}
|
||||
|
||||
// Two metadata objects holding the same H.264 codec specifics must compare
// equal through both operator== and operator!=.
TEST(VideoFrameMetadataTest, H264MetadataEquality) {
  RTPVideoHeaderH264 header = ExampleHeaderH264();

  VideoFrameMetadata metadata_lhs;
  metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);

  VideoFrameMetadata metadata_rhs;
  metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);

  EXPECT_TRUE(metadata_lhs == metadata_rhs);
  EXPECT_FALSE(metadata_lhs != metadata_rhs);
}

// A single differing nested field (NALU type) must make the metadata unequal.
TEST(VideoFrameMetadataTest, H264MetadataInequality) {
  RTPVideoHeaderH264 header = ExampleHeaderH264();

  VideoFrameMetadata metadata_lhs;
  metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);

  VideoFrameMetadata metadata_rhs;
  header.nalus[0].type = 17;
  metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);

  EXPECT_FALSE(metadata_lhs == metadata_rhs);
  EXPECT_TRUE(metadata_lhs != metadata_rhs);
}

// Two metadata objects holding the same VP9 codec specifics must compare
// equal through both operator== and operator!=.
TEST(VideoFrameMetadataTest, VP9MetadataEquality) {
  RTPVideoHeaderVP9 header = ExampleHeaderVP9();

  VideoFrameMetadata metadata_lhs;
  metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);

  VideoFrameMetadata metadata_rhs;
  metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);

  EXPECT_TRUE(metadata_lhs == metadata_rhs);
  EXPECT_FALSE(metadata_lhs != metadata_rhs);
}

// A single differing GOF entry must make the VP9 metadata unequal.
TEST(VideoFrameMetadataTest, VP9MetadataInequality) {
  RTPVideoHeaderVP9 header = ExampleHeaderVP9();

  VideoFrameMetadata metadata_lhs;
  metadata_lhs.SetRTPVideoHeaderCodecSpecifics(header);

  VideoFrameMetadata metadata_rhs;
  header.gof.pid_diff[0][0] = 42;
  metadata_rhs.SetRTPVideoHeaderCodecSpecifics(header);

  EXPECT_FALSE(metadata_lhs == metadata_rhs);
  EXPECT_TRUE(metadata_lhs != metadata_rhs);
}
|
||||
|
||||
} // namespace
|
||||
} // namespace webrtc
|
||||
43
TMessagesProj/jni/voip/webrtc/api/video/video_frame_type.h
Normal file
43
TMessagesProj/jni/voip/webrtc/api/video/video_frame_type.h
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_FRAME_TYPE_H_
|
||||
#define API_VIDEO_VIDEO_FRAME_TYPE_H_
|
||||
|
||||
#include "absl/strings/string_view.h"
|
||||
#include "rtc_base/checks.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Classification of an encoded video frame.
enum class VideoFrameType {
  kEmptyFrame = 0,
  // Wire format for MultiplexEncodedImagePacker seems to depend on numerical
  // values of these constants; do not renumber.
  kVideoFrameKey = 3,
  kVideoFrameDelta = 4,
};
|
||||
|
||||
// Returns a short human-readable label for `frame_type`. Usable at compile
// time; an out-of-range enum value trips RTC_CHECK_NOTREACHED at runtime
// (the trailing return keeps compilers that require one happy).
inline constexpr absl::string_view VideoFrameTypeToString(
    VideoFrameType frame_type) {
  switch (frame_type) {
    case VideoFrameType::kEmptyFrame:
      return "empty";
    case VideoFrameType::kVideoFrameKey:
      return "key";
    case VideoFrameType::kVideoFrameDelta:
      return "delta";
  }
  RTC_CHECK_NOTREACHED();
  return "";
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_FRAME_TYPE_H_
|
||||
|
|
@ -0,0 +1,77 @@
|
|||
/*
|
||||
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_
|
||||
#define API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
#include "absl/container/inlined_vector.h"
|
||||
#include "api/units/data_rate.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// This struct contains additional stream-level information needed by a
|
||||
// Selective Forwarding Middlebox to make relay decisions of RTP streams.
|
||||
// This struct contains additional stream-level information needed by a
// Selective Forwarding Middlebox to make relay decisions of RTP streams.
struct VideoLayersAllocation {
  static constexpr int kMaxSpatialIds = 4;
  static constexpr int kMaxTemporalIds = 4;

  // Member-wise equality over all three fields below.
  friend bool operator==(const VideoLayersAllocation& lhs,
                         const VideoLayersAllocation& rhs) {
    return lhs.rtp_stream_index == rhs.rtp_stream_index &&
           lhs.resolution_and_frame_rate_is_valid ==
               rhs.resolution_and_frame_rate_is_valid &&
           lhs.active_spatial_layers == rhs.active_spatial_layers;
  }

  friend bool operator!=(const VideoLayersAllocation& lhs,
                         const VideoLayersAllocation& rhs) {
    return !(lhs == rhs);
  }

  // Allocation data for one active spatial layer.
  struct SpatialLayer {
    // Member-wise equality over all fields.
    friend bool operator==(const SpatialLayer& lhs, const SpatialLayer& rhs) {
      return lhs.rtp_stream_index == rhs.rtp_stream_index &&
             lhs.spatial_id == rhs.spatial_id &&
             lhs.target_bitrate_per_temporal_layer ==
                 rhs.target_bitrate_per_temporal_layer &&
             lhs.width == rhs.width && lhs.height == rhs.height &&
             lhs.frame_rate_fps == rhs.frame_rate_fps;
    }

    friend bool operator!=(const SpatialLayer& lhs, const SpatialLayer& rhs) {
      return !(lhs == rhs);
    }
    int rtp_stream_index = 0;
    // Index of the spatial layer per `rtp_stream_index`.
    int spatial_id = 0;
    // Target bitrate per decode target.
    absl::InlinedVector<DataRate, kMaxTemporalIds>
        target_bitrate_per_temporal_layer;

    // These fields are only valid if `resolution_and_frame_rate_is_valid` is
    // true.
    uint16_t width = 0;
    uint16_t height = 0;
    // Max frame rate used in any temporal layer of this spatial layer.
    uint8_t frame_rate_fps = 0;
  };

  // Index of the rtp stream this allocation is sent on. Used for mapping
  // a SpatialLayer to a rtp stream.
  int rtp_stream_index = 0;
  bool resolution_and_frame_rate_is_valid = false;
  absl::InlinedVector<SpatialLayer, kMaxSpatialIds> active_spatial_layers;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_LAYERS_ALLOCATION_H_
|
||||
26
TMessagesProj/jni/voip/webrtc/api/video/video_rotation.h
Normal file
26
TMessagesProj/jni/voip/webrtc/api/video/video_rotation.h
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_ROTATION_H_
|
||||
#define API_VIDEO_VIDEO_ROTATION_H_
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// enum for clockwise rotation.
|
||||
// enum for clockwise rotation. Enumerator values equal the rotation angle in
// degrees.
enum VideoRotation {
  kVideoRotation_0 = 0,
  kVideoRotation_90 = 90,
  kVideoRotation_180 = 180,
  kVideoRotation_270 = 270
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_ROTATION_H_
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
/*
|
||||
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_SINK_INTERFACE_H_
|
||||
#define API_VIDEO_VIDEO_SINK_INTERFACE_H_
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/video_track_source_constraints.h"
|
||||
#include "rtc_base/checks.h"
|
||||
|
||||
namespace rtc {
|
||||
|
||||
// Consumer interface for a stream of video frames of type `VideoFrameT`.
template <typename VideoFrameT>
class VideoSinkInterface {
 public:
  virtual ~VideoSinkInterface() = default;

  // Delivers one frame to the sink.
  virtual void OnFrame(const VideoFrameT& frame) = 0;

  // Should be called by the source when it discards the frame due to rate
  // limiting. Default implementation ignores the notification.
  virtual void OnDiscardedFrame() {}

  // Called on the network thread when video constraints change.
  // TODO(crbug/1255737): make pure virtual once downstream project adapts.
  virtual void OnConstraintsChanged(
      const webrtc::VideoTrackSourceConstraints& constraints) {}
};
|
||||
|
||||
} // namespace rtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_SINK_INTERFACE_H_
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
/*
|
||||
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_source_interface.h"
|
||||
|
||||
namespace rtc {
|
||||
|
||||
// Out-of-line defaulted special members for VideoSinkWants (declared in
// video_source_interface.h), keeping their definitions in this TU.
VideoSinkWants::VideoSinkWants() = default;
VideoSinkWants::VideoSinkWants(const VideoSinkWants&) = default;
VideoSinkWants::~VideoSinkWants() = default;
|
||||
|
||||
} // namespace rtc
|
||||
135
TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h
Normal file
135
TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
/*
|
||||
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_SOURCE_INTERFACE_H_
|
||||
#define API_VIDEO_VIDEO_SOURCE_INTERFACE_H_
|
||||
|
||||
#include <limits>
|
||||
#include <vector>
|
||||
|
||||
#include "absl/types/optional.h"
|
||||
#include "api/video/video_sink_interface.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace rtc {
|
||||
|
||||
// VideoSinkWants is used for notifying the source of properties a video frame
|
||||
// should have when it is delivered to a certain sink.
|
||||
struct RTC_EXPORT VideoSinkWants {
  // A width/height pair in pixels.
  struct FrameSize {
    FrameSize(int width, int height) : width(width), height(height) {}
    FrameSize(const FrameSize&) = default;
    ~FrameSize() = default;

    int width;
    int height;
  };

  // Special members are defined out-of-line in video_source_interface.cc.
  VideoSinkWants();
  VideoSinkWants(const VideoSinkWants&);
  ~VideoSinkWants();

  // Tells the source whether the sink wants frames with rotation applied.
  // By default, any rotation must be applied by the sink.
  bool rotation_applied = false;

  // Tells the source that the sink only wants black frames.
  bool black_frames = false;

  // Tells the source the maximum number of pixels the sink wants.
  // Defaults to "unconstrained" (INT_MAX).
  int max_pixel_count = std::numeric_limits<int>::max();
  // Tells the source the desired number of pixels the sinks wants. This will
  // typically be used when stepping the resolution up again when conditions
  // have improved after an earlier downgrade. The source should select the
  // closest resolution to this pixel count, but if max_pixel_count is set, it
  // still sets the absolute upper bound.
  absl::optional<int> target_pixel_count;
  // Tells the source the maximum framerate the sink wants.
  int max_framerate_fps = std::numeric_limits<int>::max();

  // Tells the source that the sink wants width and height of the video frames
  // to be divisible by `resolution_alignment`.
  // For example: With I420, this value would be a multiple of 2.
  // Note that this field is unrelated to any horizontal or vertical stride
  // requirements the encoder has on the incoming video frame buffers.
  int resolution_alignment = 1;

  // The resolutions that sink is configured to consume. If the sink is an
  // encoder this is what the encoder is configured to encode. In singlecast we
  // only encode one resolution, but in simulcast and SVC this can mean multiple
  // resolutions per frame.
  //
  // The sink is always configured to consume a subset of the
  // webrtc::VideoFrame's resolution. In the case of encoding, we usually encode
  // at webrtc::VideoFrame's resolution but this may not always be the case due
  // to scaleResolutionDownBy or turning off simulcast or SVC layers.
  //
  // For example, we may capture at 720p and due to adaptation (e.g. applying
  // `max_pixel_count` constraints) create webrtc::VideoFrames of size 480p, but
  // if we do scaleResolutionDownBy:2 then the only resolution we end up
  // encoding is 240p. In this case we still need to provide webrtc::VideoFrames
  // of size 480p but we can optimize internal buffers for 240p, avoiding
  // downsampling to 480p if possible.
  //
  // Note that the `resolutions` can change while frames are in flight and
  // should only be used as a hint when constructing the webrtc::VideoFrame.
  std::vector<FrameSize> resolutions;

  // This is the resolution requested by the user using RtpEncodingParameters.
  absl::optional<FrameSize> requested_resolution;

  // `is_active` : Is this VideoSinkWants from an encoder that is encoding any
  // layer. IF YES, it will affect how the VideoAdapter will choose to
  // prioritize the OnOutputFormatRequest vs. requested_resolution. IF NO,
  // VideoAdapter consider this VideoSinkWants as a passive listener (e.g a
  // VideoRenderer or a VideoEncoder that is not currently actively encoding).
  bool is_active = false;

  // This sub-struct contains information computed by VideoBroadcaster
  // that aggregates several VideoSinkWants (and sends them to
  // AdaptedVideoTrackSource).
  struct Aggregates {
    // `active_without_requested_resolution` is set by VideoBroadcaster
    // when aggregating sink wants if there exists any sink (encoder) that is
    // active but has not set the `requested_resolution`, i.e is relying on
    // OnOutputFormatRequest to handle encode resolution.
    bool any_active_without_requested_resolution = false;
  };
  // Unset on wants produced by an individual sink; populated by aggregation.
  absl::optional<Aggregates> aggregates;
};
|
||||
|
||||
// Equality for VideoSinkWants::FrameSize: both dimensions must match.
inline bool operator==(const VideoSinkWants::FrameSize& a,
                       const VideoSinkWants::FrameSize& b) {
  return a.width == b.width && a.height == b.height;
}

inline bool operator!=(const VideoSinkWants::FrameSize& a,
                       const VideoSinkWants::FrameSize& b) {
  return !(a == b);
}
|
||||
|
||||
// Producer interface for a stream of video frames of type `VideoFrameT`.
template <typename VideoFrameT>
class VideoSourceInterface {
 public:
  virtual ~VideoSourceInterface() = default;

  // Registers `sink`, or updates the wants of an already-registered sink.
  virtual void AddOrUpdateSink(VideoSinkInterface<VideoFrameT>* sink,
                               const VideoSinkWants& wants) = 0;
  // RemoveSink must guarantee that at the time the method returns,
  // there is no current and no future calls to VideoSinkInterface::OnFrame.
  virtual void RemoveSink(VideoSinkInterface<VideoFrameT>* sink) = 0;

  // Request underlying source to capture a new frame.
  // TODO(crbug/1255737): make pure virtual once downstream projects adapt.
  virtual void RequestRefreshFrame() {}
};
|
||||
|
||||
} // namespace rtc
|
||||
#endif // API_VIDEO_VIDEO_SOURCE_INTERFACE_H_
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/*
|
||||
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_
|
||||
#define API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "api/video/video_bitrate_allocator_factory.h"
|
||||
#include "api/video_codecs/video_encoder.h"
|
||||
#include "api/video_codecs/video_encoder_factory.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Callback interface used by the video stream encoder to ask its owner to
// switch encoder implementations.
class EncoderSwitchRequestCallback {
 public:
  virtual ~EncoderSwitchRequestCallback() {}

  // Requests switch to next negotiated encoder.
  virtual void RequestEncoderFallback() = 0;

  // Requests switch to a specific encoder. If the encoder is not available and
  // `allow_default_fallback` is `true` the default fallback is invoked.
  virtual void RequestEncoderSwitch(const SdpVideoFormat& format,
                                    bool allow_default_fallback) = 0;
};

// Configuration for a video stream encoder. Plain data holder; raw pointers
// below are non-owning.
struct VideoStreamEncoderSettings {
  explicit VideoStreamEncoderSettings(
      const VideoEncoder::Capabilities& capabilities)
      : capabilities(capabilities) {}

  // Enables the new method to estimate the cpu load from encoding, used for
  // cpu adaptation.
  bool experiment_cpu_load_estimator = false;

  // Ownership stays with WebrtcVideoEngine (delegated from PeerConnection).
  VideoEncoderFactory* encoder_factory = nullptr;

  // Requests the WebRtcVideoChannel to perform a codec switch.
  EncoderSwitchRequestCallback* encoder_switch_request_callback = nullptr;

  // Ownership stays with WebrtcVideoEngine (delegated from PeerConnection).
  VideoBitrateAllocatorFactory* bitrate_allocator_factory = nullptr;

  // Negotiated capabilities which the VideoEncoder may expect the other
  // side to use.
  VideoEncoder::Capabilities capabilities;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_STREAM_ENCODER_SETTINGS_H_
|
||||
122
TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc
Normal file
122
TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
/*
|
||||
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "api/video/video_timing.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
#include "api/array_view.h"
|
||||
#include "api/units/time_delta.h"
|
||||
#include "rtc_base/logging.h"
|
||||
#include "rtc_base/numerics/safe_conversions.h"
|
||||
#include "rtc_base/strings/string_builder.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) {
|
||||
if (time_ms < base_ms) {
|
||||
RTC_DLOG(LS_ERROR) << "Delta " << (time_ms - base_ms)
|
||||
<< "ms expected to be positive";
|
||||
}
|
||||
return rtc::saturated_cast<uint16_t>(time_ms - base_ms);
|
||||
}
|
||||
|
||||
// TimeDelta overload: saturates `delta` (in ms) into uint16_t range,
// logging an error (debug builds) when the delta is negative.
uint16_t VideoSendTiming::GetDeltaCappedMs(TimeDelta delta) {
  if (delta < TimeDelta::Zero()) {
    RTC_DLOG(LS_ERROR) << "Delta " << delta.ms()
                       << "ms expected to be positive";
  }
  return rtc::saturated_cast<uint16_t>(delta.ms());
}
|
||||
|
||||
// All timestamp fields start at -1, the "not set" sentinel used by
// EndToEndDelay() and friends; flags starts as kNotTriggered.
TimingFrameInfo::TimingFrameInfo()
    : rtp_timestamp(0),
      capture_time_ms(-1),
      encode_start_ms(-1),
      encode_finish_ms(-1),
      packetization_finish_ms(-1),
      pacer_exit_ms(-1),
      network_timestamp_ms(-1),
      network2_timestamp_ms(-1),
      receive_start_ms(-1),
      receive_finish_ms(-1),
      decode_start_ms(-1),
      decode_finish_ms(-1),
      render_time_ms(-1),
      flags(VideoSendTiming::kNotTriggered) {}
|
||||
|
||||
int64_t TimingFrameInfo::EndToEndDelay() const {
|
||||
return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
|
||||
}
|
||||
|
||||
bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const {
|
||||
int64_t other_delay = other.EndToEndDelay();
|
||||
return other_delay == -1 || EndToEndDelay() > other_delay;
|
||||
}
|
||||
|
||||
bool TimingFrameInfo::operator<(const TimingFrameInfo& other) const {
|
||||
return other.IsLongerThan(*this);
|
||||
}
|
||||
|
||||
// `*this <= other` iff `*this` did not take strictly longer end to end than
// `other`.
bool TimingFrameInfo::operator<=(const TimingFrameInfo& other) const {
  return !IsLongerThan(other);
}
|
||||
|
||||
bool TimingFrameInfo::IsOutlier() const {
|
||||
return !IsInvalid() && (flags & VideoSendTiming::kTriggeredBySize);
|
||||
}
|
||||
|
||||
bool TimingFrameInfo::IsTimerTriggered() const {
|
||||
return !IsInvalid() && (flags & VideoSendTiming::kTriggeredByTimer);
|
||||
}
|
||||
|
||||
// Timing data is invalid when `flags` carries the kInvalid sentinel; callers
// are expected to ignore such entries.
bool TimingFrameInfo::IsInvalid() const {
  return flags == VideoSendTiming::kInvalid;
}
|
||||
|
||||
std::string TimingFrameInfo::ToString() const {
|
||||
if (IsInvalid()) {
|
||||
return "";
|
||||
}
|
||||
|
||||
char buf[1024];
|
||||
rtc::SimpleStringBuilder sb(buf);
|
||||
|
||||
sb << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms << ','
|
||||
<< encode_finish_ms << ',' << packetization_finish_ms << ','
|
||||
<< pacer_exit_ms << ',' << network_timestamp_ms << ','
|
||||
<< network2_timestamp_ms << ',' << receive_start_ms << ','
|
||||
<< receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms
|
||||
<< ',' << render_time_ms << ',' << IsOutlier() << ','
|
||||
<< IsTimerTriggered();
|
||||
|
||||
return sb.str();
|
||||
}
|
||||
|
||||
// Stores `min`/`max` clamped so that 0 <= min_ <= max_ <= kMax holds
// (max_ is clamped against the already-clamped min_, so ordering is kept).
// Logs an error whenever the caller-supplied pair violated the invariant.
VideoPlayoutDelay::VideoPlayoutDelay(TimeDelta min, TimeDelta max)
    : min_(std::clamp(min, TimeDelta::Zero(), kMax)),
      max_(std::clamp(max, min_, kMax)) {
  const bool valid_range =
      TimeDelta::Zero() <= min && min <= max && max <= kMax;
  if (!valid_range) {
    RTC_LOG(LS_ERROR) << "Invalid video playout delay: [" << min << "," << max
                      << "]. Clamped to [" << this->min() << "," << this->max()
                      << "]";
  }
}
|
||||
|
||||
// Replaces the stored limits and returns true when the pair satisfies
// 0 <= min <= max <= kMax; otherwise leaves the stored limits untouched
// and returns false.
bool VideoPlayoutDelay::Set(TimeDelta min, TimeDelta max) {
  if (min < TimeDelta::Zero() || max < min || kMax < max) {
    return false;
  }
  min_ = min;
  max_ = max;
  return true;
}
|
||||
|
||||
} // namespace webrtc
|
||||
150
TMessagesProj/jni/voip/webrtc/api/video/video_timing.h
Normal file
150
TMessagesProj/jni/voip/webrtc/api/video/video_timing.h
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
/*
|
||||
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef API_VIDEO_VIDEO_TIMING_H_
|
||||
#define API_VIDEO_VIDEO_TIMING_H_
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
#include <limits>
|
||||
#include <string>
|
||||
|
||||
#include "api/units/time_delta.h"
|
||||
#include "rtc_base/system/rtc_export.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Video timing timestamps in ms counted from capture_time_ms of a frame.
// This structure represents data sent in the video-timing RTP header
// extension.
struct RTC_EXPORT VideoSendTiming {
  enum TimingFrameFlags : uint8_t {
    kNotTriggered = 0,  // Timing info valid, but not to be transmitted.
                        // Used on send-side only.
    kTriggeredByTimer = 1 << 0,  // Frame marked for tracing by periodic timer.
    kTriggeredBySize = 1 << 1,   // Frame marked for tracing due to size.
    kInvalid = std::numeric_limits<uint8_t>::max()  // Invalid, ignore!
  };

  // Returns `time_ms - base_ms` capped at the max 16-bit value, since the
  // https://webrtc.org/experiments/rtp-hdrext/video-timing/ extension stores
  // 16-bit deltas of timestamps from packet capture time. Used to fill the
  // delta fields of this structure.
  static uint16_t GetDeltaCappedMs(int64_t base_ms, int64_t time_ms);
  static uint16_t GetDeltaCappedMs(TimeDelta delta);

  // Per-stage deltas, in ms, counted from the frame's capture time (see the
  // struct comment above).
  uint16_t encode_start_delta_ms;
  uint16_t encode_finish_delta_ms;
  uint16_t packetization_finish_delta_ms;
  uint16_t pacer_exit_delta_ms;
  uint16_t network_timestamp_delta_ms;
  uint16_t network2_timestamp_delta_ms;
  uint8_t flags = TimingFrameFlags::kInvalid;
};
|
||||
|
||||
// Used to report precise timings of 'timing frames'. Contains all important
// timestamps for the lifetime of that specific frame. Reported as a string
// via GetStats(). Only the frame which took the longest between two GetStats
// calls is reported.
struct RTC_EXPORT TimingFrameInfo {
  TimingFrameInfo();

  // Returns end-to-end delay of a frame, if sender and receiver timestamps are
  // synchronized, -1 otherwise.
  int64_t EndToEndDelay() const;

  // Returns true if current frame took longer to process than `other` frame.
  // If other frame's clocks are not synchronized, current frame is always
  // preferred.
  bool IsLongerThan(const TimingFrameInfo& other) const;

  // Returns true if flags are set to indicate this frame was marked for
  // tracing due to the size being outside some limit.
  bool IsOutlier() const;

  // Returns true if flags are set to indicate this frame was marked for
  // tracing due to cyclic timer.
  bool IsTimerTriggered() const;

  // Returns true if the timing data is marked as invalid, in which case it
  // should be ignored.
  bool IsInvalid() const;

  // Comma-separated serialization of all fields; empty string when invalid.
  std::string ToString() const;

  bool operator<(const TimingFrameInfo& other) const;

  bool operator<=(const TimingFrameInfo& other) const;

  uint32_t rtp_timestamp;  // Identifier of a frame.
  // All timestamps below are in local monotonic clock of a receiver.
  // If sender clock is not yet estimated, sender timestamps
  // (capture_time_ms ... pacer_exit_ms) are negative values, still
  // relatively correct.
  int64_t capture_time_ms;          // Capture time of a frame.
  int64_t encode_start_ms;          // Encode start time.
  int64_t encode_finish_ms;         // Encode completion time.
  int64_t packetization_finish_ms;  // Time when frame was passed to pacer.
  int64_t pacer_exit_ms;  // Time when last packet was pushed out of pacer.
  // Two in-network RTP processor timestamps: meaning is application specific.
  int64_t network_timestamp_ms;
  int64_t network2_timestamp_ms;
  int64_t receive_start_ms;   // First received packet time.
  int64_t receive_finish_ms;  // Last received packet time.
  int64_t decode_start_ms;    // Decode start time.
  int64_t decode_finish_ms;   // Decode completion time.
  int64_t render_time_ms;  // Proposed render time to ensure smooth playback.

  uint8_t flags;  // Flags indicating validity and/or why tracing was triggered.
};
|
||||
|
||||
// Minimum and maximum playout delay values from capture to render.
// These are best effort values.
//
// min = max = 0 indicates that the receiver should try and render
// frame as soon as possible.
//
// min = x, max = y indicates that the receiver is free to adapt
// in the range (x, y) based on network jitter.
// This class ensures invariant 0 <= min <= max <= kMax.
class RTC_EXPORT VideoPlayoutDelay {
 public:
  // Maximum supported value for the delay limit: 0xFFF steps of 10 ms
  // (40950 ms). NOTE(review): presumably mirrors a 12-bit, 10 ms-granularity
  // wire field of the playout-delay RTP extension — confirm against the spec.
  static constexpr TimeDelta kMax = TimeDelta::Millis(10) * 0xFFF;

  // Creates delay limits that indicate the receiver should try to render the
  // frame as soon as possible.
  static VideoPlayoutDelay Minimal() {
    return VideoPlayoutDelay(TimeDelta::Zero(), TimeDelta::Zero());
  }

  // Creates valid, but unspecified limits (min = 0, max = kMax).
  VideoPlayoutDelay() = default;
  VideoPlayoutDelay(const VideoPlayoutDelay&) = default;
  VideoPlayoutDelay& operator=(const VideoPlayoutDelay&) = default;
  // Clamps the passed values into [0, kMax] keeping min <= max; logs an
  // error when clamping was needed.
  VideoPlayoutDelay(TimeDelta min, TimeDelta max);

  // Returns true and stores the limits when they satisfy the invariant;
  // otherwise returns false and leaves the stored limits unchanged.
  bool Set(TimeDelta min, TimeDelta max);

  TimeDelta min() const { return min_; }
  TimeDelta max() const { return max_; }

  friend bool operator==(const VideoPlayoutDelay& lhs,
                         const VideoPlayoutDelay& rhs) {
    return lhs.min_ == rhs.min_ && lhs.max_ == rhs.max_;
  }

 private:
  TimeDelta min_ = TimeDelta::Zero();
  TimeDelta max_ = kMax;
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // API_VIDEO_VIDEO_TIMING_H_
|
||||
Loading…
Add table
Add a link
Reference in a new issue