Repo created

Fr4nz D13trich 2025-11-22 14:04:28 +01:00
parent 81b91f4139
commit f8c34fa5ee
22732 changed files with 4815320 additions and 2 deletions


@@ -0,0 +1 @@
ssilkin@webrtc.org


@@ -0,0 +1,299 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/h264/h264_bitstream_parser.h"
#include <stdlib.h>
#include <cstdint>
#include <vector>
#include "common_video/h264/h264_common.h"
#include "rtc_base/bitstream_reader.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace {
constexpr int kMaxAbsQpDeltaValue = 51;
constexpr int kMinQpValue = 0;
constexpr int kMaxQpValue = 51;
} // namespace
H264BitstreamParser::H264BitstreamParser() = default;
H264BitstreamParser::~H264BitstreamParser() = default;
H264BitstreamParser::Result H264BitstreamParser::ParseNonParameterSetNalu(
const uint8_t* source,
size_t source_length,
uint8_t nalu_type) {
if (!sps_ || !pps_)
return kInvalidStream;
last_slice_qp_delta_ = absl::nullopt;
const std::vector<uint8_t> slice_rbsp =
H264::ParseRbsp(source, source_length);
if (slice_rbsp.size() < H264::kNaluTypeSize)
return kInvalidStream;
BitstreamReader slice_reader(slice_rbsp);
slice_reader.ConsumeBits(H264::kNaluTypeSize * 8);
// Check to see if this is an IDR slice, which has an extra field to parse
// out.
bool is_idr = (source[0] & 0x0F) == H264::NaluType::kIdr;
uint8_t nal_ref_idc = (source[0] & 0x60) >> 5;
// first_mb_in_slice: ue(v)
slice_reader.ReadExponentialGolomb();
// slice_type: ue(v)
uint32_t slice_type = slice_reader.ReadExponentialGolomb();
// slice_type's 5..9 range is used to indicate that all slices of a picture
// have the same value of slice_type % 5; we don't care about that, so we map
// it to the corresponding 0..4 range.
slice_type %= 5;
// pic_parameter_set_id: ue(v)
slice_reader.ReadExponentialGolomb();
if (sps_->separate_colour_plane_flag == 1) {
// colour_plane_id
slice_reader.ConsumeBits(2);
}
// frame_num: u(v)
// Represented by log2_max_frame_num bits.
slice_reader.ConsumeBits(sps_->log2_max_frame_num);
bool field_pic_flag = false;
if (sps_->frame_mbs_only_flag == 0) {
// field_pic_flag: u(1)
field_pic_flag = slice_reader.Read<bool>();
if (field_pic_flag) {
// bottom_field_flag: u(1)
slice_reader.ConsumeBits(1);
}
}
if (is_idr) {
// idr_pic_id: ue(v)
slice_reader.ReadExponentialGolomb();
}
// pic_order_cnt_lsb: u(v)
// Represented by sps_.log2_max_pic_order_cnt_lsb bits.
if (sps_->pic_order_cnt_type == 0) {
slice_reader.ConsumeBits(sps_->log2_max_pic_order_cnt_lsb);
if (pps_->bottom_field_pic_order_in_frame_present_flag && !field_pic_flag) {
// delta_pic_order_cnt_bottom: se(v)
slice_reader.ReadExponentialGolomb();
}
}
if (sps_->pic_order_cnt_type == 1 &&
!sps_->delta_pic_order_always_zero_flag) {
// delta_pic_order_cnt[0]: se(v)
slice_reader.ReadExponentialGolomb();
if (pps_->bottom_field_pic_order_in_frame_present_flag && !field_pic_flag) {
// delta_pic_order_cnt[1]: se(v)
slice_reader.ReadExponentialGolomb();
}
}
if (pps_->redundant_pic_cnt_present_flag) {
// redundant_pic_cnt: ue(v)
slice_reader.ReadExponentialGolomb();
}
if (slice_type == H264::SliceType::kB) {
// direct_spatial_mv_pred_flag: u(1)
slice_reader.ConsumeBits(1);
}
switch (slice_type) {
case H264::SliceType::kP:
case H264::SliceType::kB:
case H264::SliceType::kSp:
// num_ref_idx_active_override_flag: u(1)
if (slice_reader.Read<bool>()) {
// num_ref_idx_l0_active_minus1: ue(v)
slice_reader.ReadExponentialGolomb();
if (slice_type == H264::SliceType::kB) {
// num_ref_idx_l1_active_minus1: ue(v)
slice_reader.ReadExponentialGolomb();
}
}
break;
default:
break;
}
if (!slice_reader.Ok()) {
return kInvalidStream;
}
// assume nal_unit_type != 20 && nal_unit_type != 21:
if (nalu_type == 20 || nalu_type == 21) {
RTC_LOG(LS_ERROR) << "Unsupported nal unit type.";
return kUnsupportedStream;
}
// if (nal_unit_type == 20 || nal_unit_type == 21)
// ref_pic_list_mvc_modification()
// else
{
// ref_pic_list_modification():
// `slice_type` checks here don't use named constants as they aren't named
// in the spec for this segment. Keeping them consistent makes it easier to
// verify that they are both the same.
if (slice_type % 5 != 2 && slice_type % 5 != 4) {
// ref_pic_list_modification_flag_l0: u(1)
if (slice_reader.Read<bool>()) {
uint32_t modification_of_pic_nums_idc;
do {
// modification_of_pic_nums_idc: ue(v)
modification_of_pic_nums_idc = slice_reader.ReadExponentialGolomb();
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
// abs_diff_pic_num_minus1: ue(v)
slice_reader.ReadExponentialGolomb();
} else if (modification_of_pic_nums_idc == 2) {
// long_term_pic_num: ue(v)
slice_reader.ReadExponentialGolomb();
}
} while (modification_of_pic_nums_idc != 3 && slice_reader.Ok());
}
}
if (slice_type % 5 == 1) {
// ref_pic_list_modification_flag_l1: u(1)
if (slice_reader.Read<bool>()) {
uint32_t modification_of_pic_nums_idc;
do {
// modification_of_pic_nums_idc: ue(v)
modification_of_pic_nums_idc = slice_reader.ReadExponentialGolomb();
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
// abs_diff_pic_num_minus1: ue(v)
slice_reader.ReadExponentialGolomb();
} else if (modification_of_pic_nums_idc == 2) {
// long_term_pic_num: ue(v)
slice_reader.ReadExponentialGolomb();
}
} while (modification_of_pic_nums_idc != 3 && slice_reader.Ok());
}
}
}
if (!slice_reader.Ok()) {
return kInvalidStream;
}
// TODO(pbos): Do we need support for pred_weight_table()?
if ((pps_->weighted_pred_flag && (slice_type == H264::SliceType::kP ||
slice_type == H264::SliceType::kSp)) ||
(pps_->weighted_bipred_idc == 1 && slice_type == H264::SliceType::kB)) {
RTC_LOG(LS_ERROR) << "Streams with pred_weight_table unsupported.";
return kUnsupportedStream;
}
// if ((weighted_pred_flag && (slice_type == P || slice_type == SP)) ||
// (weighted_bipred_idc == 1 && slice_type == B)) {
// pred_weight_table()
// }
if (nal_ref_idc != 0) {
// dec_ref_pic_marking():
if (is_idr) {
// no_output_of_prior_pics_flag: u(1)
// long_term_reference_flag: u(1)
slice_reader.ConsumeBits(2);
} else {
// adaptive_ref_pic_marking_mode_flag: u(1)
if (slice_reader.Read<bool>()) {
uint32_t memory_management_control_operation;
do {
// memory_management_control_operation: ue(v)
memory_management_control_operation =
slice_reader.ReadExponentialGolomb();
if (memory_management_control_operation == 1 ||
memory_management_control_operation == 3) {
// difference_of_pic_nums_minus1: ue(v)
slice_reader.ReadExponentialGolomb();
}
if (memory_management_control_operation == 2) {
// long_term_pic_num: ue(v)
slice_reader.ReadExponentialGolomb();
}
if (memory_management_control_operation == 3 ||
memory_management_control_operation == 6) {
// long_term_frame_idx: ue(v)
slice_reader.ReadExponentialGolomb();
}
if (memory_management_control_operation == 4) {
// max_long_term_frame_idx_plus1: ue(v)
slice_reader.ReadExponentialGolomb();
}
} while (memory_management_control_operation != 0 && slice_reader.Ok());
}
}
}
if (pps_->entropy_coding_mode_flag && slice_type != H264::SliceType::kI &&
slice_type != H264::SliceType::kSi) {
// cabac_init_idc: ue(v)
slice_reader.ReadExponentialGolomb();
}
int last_slice_qp_delta = slice_reader.ReadSignedExponentialGolomb();
if (!slice_reader.Ok()) {
return kInvalidStream;
}
if (abs(last_slice_qp_delta) > kMaxAbsQpDeltaValue) {
// Something has gone wrong, and the parsed value is invalid.
RTC_LOG(LS_WARNING) << "Parsed QP value out of range.";
return kInvalidStream;
}
last_slice_qp_delta_ = last_slice_qp_delta;
return kOk;
}
void H264BitstreamParser::ParseSlice(const uint8_t* slice, size_t length) {
H264::NaluType nalu_type = H264::ParseNaluType(slice[0]);
switch (nalu_type) {
case H264::NaluType::kSps: {
sps_ = SpsParser::ParseSps(slice + H264::kNaluTypeSize,
length - H264::kNaluTypeSize);
if (!sps_)
RTC_DLOG(LS_WARNING) << "Unable to parse SPS from H264 bitstream.";
break;
}
case H264::NaluType::kPps: {
pps_ = PpsParser::ParsePps(slice + H264::kNaluTypeSize,
length - H264::kNaluTypeSize);
if (!pps_)
RTC_DLOG(LS_WARNING) << "Unable to parse PPS from H264 bitstream.";
break;
}
case H264::NaluType::kAud:
case H264::NaluType::kSei:
case H264::NaluType::kPrefix:
break; // Ignore these nalus, as we don't care about their contents.
default:
Result res = ParseNonParameterSetNalu(slice, length, nalu_type);
if (res != kOk)
RTC_DLOG(LS_INFO) << "Failed to parse bitstream. Error: " << res;
break;
}
}
void H264BitstreamParser::ParseBitstream(
rtc::ArrayView<const uint8_t> bitstream) {
std::vector<H264::NaluIndex> nalu_indices =
H264::FindNaluIndices(bitstream.data(), bitstream.size());
for (const H264::NaluIndex& index : nalu_indices)
ParseSlice(bitstream.data() + index.payload_start_offset,
index.payload_size);
}
absl::optional<int> H264BitstreamParser::GetLastSliceQp() const {
if (!last_slice_qp_delta_ || !pps_)
return absl::nullopt;
const int qp = 26 + pps_->pic_init_qp_minus26 + *last_slice_qp_delta_;
if (qp < kMinQpValue || qp > kMaxQpValue) {
RTC_LOG(LS_ERROR) << "Parsed invalid QP from bitstream.";
return absl::nullopt;
}
return qp;
}
} // namespace webrtc


@@ -0,0 +1,58 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_VIDEO_H264_H264_BITSTREAM_PARSER_H_
#define COMMON_VIDEO_H264_H264_BITSTREAM_PARSER_H_
#include <stddef.h>
#include <stdint.h>
#include "absl/types/optional.h"
#include "api/video_codecs/bitstream_parser.h"
#include "common_video/h264/pps_parser.h"
#include "common_video/h264/sps_parser.h"
namespace webrtc {
// Stateful H264 bitstream parser (due to SPS/PPS). Used to parse out QP values
// from the bitstream.
// TODO(pbos): Unify with RTP SPS parsing and only use one H264 parser.
// TODO(pbos): If/when this gets used on the receiver side, CHECKs must be
// removed and replaced with graceful aborts, as we have no control over
// receive-side bitstreams.
class H264BitstreamParser : public BitstreamParser {
public:
H264BitstreamParser();
~H264BitstreamParser() override;
void ParseBitstream(rtc::ArrayView<const uint8_t> bitstream) override;
absl::optional<int> GetLastSliceQp() const override;
protected:
enum Result {
kOk,
kInvalidStream,
kUnsupportedStream,
};
void ParseSlice(const uint8_t* slice, size_t length);
Result ParseNonParameterSetNalu(const uint8_t* source,
size_t source_length,
uint8_t nalu_type);
// SPS/PPS state, updated when parsing new SPS/PPS, used to parse slices.
absl::optional<SpsParser::SpsState> sps_;
absl::optional<PpsParser::PpsState> pps_;
// Last parsed slice QP.
absl::optional<int32_t> last_slice_qp_delta_;
};
} // namespace webrtc
#endif // COMMON_VIDEO_H264_H264_BITSTREAM_PARSER_H_
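
For reference, a minimal usage sketch of the parser declared above (not part of the upstream sources; the helper name LastSliceQp and the input buffer are illustrative):

#include "absl/types/optional.h"
#include "api/array_view.h"
#include "common_video/h264/h264_bitstream_parser.h"

// Hypothetical helper: feed one or more complete Annex B NAL units to the
// parser and read back the QP of the last slice it managed to parse.
absl::optional<int> LastSliceQp(rtc::ArrayView<const uint8_t> encoded_frame) {
  webrtc::H264BitstreamParser parser;
  // The parser is stateful: SPS/PPS NALUs seen here are remembered and used
  // when parsing the slice NALUs that follow them.
  parser.ParseBitstream(encoded_frame);
  // Returns 26 + pic_init_qp_minus26 + slice_qp_delta, or nullopt if no slice
  // was parsed or the value falls outside [0, 51].
  return parser.GetLastSliceQp();
}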


@@ -0,0 +1,116 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/h264/h264_common.h"
#include <cstdint>
namespace webrtc {
namespace H264 {
const uint8_t kNaluTypeMask = 0x1F;
std::vector<NaluIndex> FindNaluIndices(const uint8_t* buffer,
size_t buffer_size) {
// This is sorta like Boyer-Moore, but with only the first optimization step:
// given a 3-byte sequence we're looking at, if the 3rd byte isn't 1 or 0,
// skip ahead to the next 3-byte sequence. 0s and 1s are relatively rare, so
// this will skip the majority of reads/checks.
std::vector<NaluIndex> sequences;
if (buffer_size < kNaluShortStartSequenceSize)
return sequences;
static_assert(kNaluShortStartSequenceSize >= 2,
"kNaluShortStartSequenceSize must be greater than or equal to 2");
const size_t end = buffer_size - kNaluShortStartSequenceSize;
for (size_t i = 0; i < end;) {
if (buffer[i + 2] > 1) {
i += 3;
} else if (buffer[i + 2] == 1) {
if (buffer[i + 1] == 0 && buffer[i] == 0) {
// We found a start sequence, now check whether it was a 3- or 4-byte one.
NaluIndex index = {i, i + 3, 0};
if (index.start_offset > 0 && buffer[index.start_offset - 1] == 0)
--index.start_offset;
// Update length of previous entry.
auto it = sequences.rbegin();
if (it != sequences.rend())
it->payload_size = index.start_offset - it->payload_start_offset;
sequences.push_back(index);
}
i += 3;
} else {
++i;
}
}
// Update length of last entry, if any.
auto it = sequences.rbegin();
if (it != sequences.rend())
it->payload_size = buffer_size - it->payload_start_offset;
return sequences;
}
NaluType ParseNaluType(uint8_t data) {
return static_cast<NaluType>(data & kNaluTypeMask);
}
std::vector<uint8_t> ParseRbsp(const uint8_t* data, size_t length) {
std::vector<uint8_t> out;
out.reserve(length);
for (size_t i = 0; i < length;) {
// Be careful about over/underflow here. length - 3 can underflow and i + 3
// can overflow, but length - i can't, because i < length per the loop
// condition above, and that expression produces the number of bytes left in
// the stream including the byte at i.
if (length - i >= 3 && !data[i] && !data[i + 1] && data[i + 2] == 3) {
// Two rbsp bytes.
out.push_back(data[i++]);
out.push_back(data[i++]);
// Skip the emulation byte.
i++;
} else {
// Single rbsp byte.
out.push_back(data[i++]);
}
}
return out;
}
void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination) {
static const uint8_t kZerosInStartSequence = 2;
static const uint8_t kEmulationByte = 0x03u;
size_t num_consecutive_zeros = 0;
destination->EnsureCapacity(destination->size() + length);
for (size_t i = 0; i < length; ++i) {
uint8_t byte = bytes[i];
if (byte <= kEmulationByte &&
num_consecutive_zeros >= kZerosInStartSequence) {
// Need to escape.
destination->AppendData(kEmulationByte);
num_consecutive_zeros = 0;
}
destination->AppendData(byte);
if (byte == 0) {
++num_consecutive_zeros;
} else {
num_consecutive_zeros = 0;
}
}
}
} // namespace H264
} // namespace webrtc
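
A small illustration of FindNaluIndices (the byte values are made up, not from the upstream tests): two NAL units, one behind a 4-byte start code and one behind a 3-byte start code.

#include <vector>
#include "common_video/h264/h264_common.h"

void FindNaluIndicesExample() {
  const uint8_t kAnnexB[] = {0x00, 0x00, 0x00, 0x01, 0x67, 0xAA, 0xBB,
                             0x00, 0x00, 0x01, 0x68, 0xCC};
  std::vector<webrtc::H264::NaluIndex> indices =
      webrtc::H264::FindNaluIndices(kAnnexB, sizeof(kAnnexB));
  // indices[0]: {start_offset = 0, payload_start_offset = 4, payload_size = 3}
  // indices[1]: {start_offset = 7, payload_start_offset = 10, payload_size = 2}
  // ParseNaluType(kAnnexB[4]) == H264::NaluType::kSps, since 0x67 & 0x1F == 7.
}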


@@ -0,0 +1,92 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_VIDEO_H264_H264_COMMON_H_
#define COMMON_VIDEO_H264_H264_COMMON_H_
#include <stddef.h>
#include <stdint.h>
#include <vector>
#include "rtc_base/buffer.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
namespace H264 {
// The size of a full NALU start sequence {0 0 0 1}, used for the first NALU
// of an access unit, and for SPS and PPS blocks.
const size_t kNaluLongStartSequenceSize = 4;
// The size of a shortened NALU start sequence {0 0 1}, that may be used if
// not the first NALU of an access unit or an SPS or PPS block.
const size_t kNaluShortStartSequenceSize = 3;
// The size of the NALU type byte (1).
const size_t kNaluTypeSize = 1;
enum NaluType : uint8_t {
kSlice = 1,
kIdr = 5,
kSei = 6,
kSps = 7,
kPps = 8,
kAud = 9,
kEndOfSequence = 10,
kEndOfStream = 11,
kFiller = 12,
kPrefix = 14,
kStapA = 24,
kFuA = 28
};
enum SliceType : uint8_t { kP = 0, kB = 1, kI = 2, kSp = 3, kSi = 4 };
struct NaluIndex {
// Start index of NALU, including start sequence.
size_t start_offset;
// Start index of NALU payload, typically type header.
size_t payload_start_offset;
// Length of NALU payload, in bytes, counting from payload_start_offset.
size_t payload_size;
};
// Returns a vector of the NALU indices in the given buffer.
RTC_EXPORT std::vector<NaluIndex> FindNaluIndices(const uint8_t* buffer,
size_t buffer_size);
// Get the NAL type from the header byte immediately following start sequence.
RTC_EXPORT NaluType ParseNaluType(uint8_t data);
// Methods for parsing and writing RBSP. See section 7.4.1 of the H264 spec.
//
// The following sequences are illegal, and need to be escaped when encoding:
// 00 00 00 -> 00 00 03 00
// 00 00 01 -> 00 00 03 01
// 00 00 02 -> 00 00 03 02
// And things in the source that look like the emulation byte pattern (00 00 03)
// need to have an extra emulation byte added, so it's removed when decoding:
// 00 00 03 -> 00 00 03 03
//
// Decoding is simply a matter of finding any 00 00 03 sequence and removing
// the 03 emulation byte.
// Parse the given data and remove any emulation byte escaping.
std::vector<uint8_t> ParseRbsp(const uint8_t* data, size_t length);
// Write the given data to the destination buffer, inserting emulation bytes
// as needed to escape any data that could be interpreted as a start
// sequence.
void WriteRbsp(const uint8_t* bytes, size_t length, rtc::Buffer* destination);
} // namespace H264
} // namespace webrtc
#endif // COMMON_VIDEO_H264_H264_COMMON_H_
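
A round-trip sketch of the escaping rules described above (the payload bytes are made up): 00 00 01 inside a payload would look like a start code, so WriteRbsp escapes it and ParseRbsp strips the escape again.

#include <vector>
#include "common_video/h264/h264_common.h"
#include "rtc_base/buffer.h"

void RbspRoundTripExample() {
  const uint8_t kPayload[] = {0x42, 0x00, 0x00, 0x01, 0x17};
  rtc::Buffer escaped;
  webrtc::H264::WriteRbsp(kPayload, sizeof(kPayload), &escaped);
  // escaped now holds: 42 00 00 03 01 17 (emulation byte 03 inserted).
  std::vector<uint8_t> unescaped =
      webrtc::H264::ParseRbsp(escaped.data(), escaped.size());
  // unescaped is {0x42, 0x00, 0x00, 0x01, 0x17} again.
}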


@@ -0,0 +1,160 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/h264/pps_parser.h"
#include <cstdint>
#include <limits>
#include <vector>
#include "absl/numeric/bits.h"
#include "common_video/h264/h264_common.h"
#include "rtc_base/bitstream_reader.h"
#include "rtc_base/checks.h"
namespace webrtc {
namespace {
constexpr int kMaxPicInitQpDeltaValue = 25;
constexpr int kMinPicInitQpDeltaValue = -26;
} // namespace
// General note: this is based off the 02/2014 version of the H.264 standard.
// You can find it on this page:
// http://www.itu.int/rec/T-REC-H.264
absl::optional<PpsParser::PpsState> PpsParser::ParsePps(const uint8_t* data,
size_t length) {
// First, parse out rbsp, which is basically the source buffer minus emulation
// bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in
// section 7.3.1 of the H.264 standard.
return ParseInternal(H264::ParseRbsp(data, length));
}
bool PpsParser::ParsePpsIds(const uint8_t* data,
size_t length,
uint32_t* pps_id,
uint32_t* sps_id) {
RTC_DCHECK(pps_id);
RTC_DCHECK(sps_id);
// First, parse out rbsp, which is basically the source buffer minus emulation
// bytes (the last byte of a 0x00 0x00 0x03 sequence). RBSP is defined in
// section 7.3.1 of the H.264 standard.
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
BitstreamReader reader(unpacked_buffer);
*pps_id = reader.ReadExponentialGolomb();
*sps_id = reader.ReadExponentialGolomb();
return reader.Ok();
}
absl::optional<uint32_t> PpsParser::ParsePpsIdFromSlice(const uint8_t* data,
size_t length) {
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
BitstreamReader slice_reader(unpacked_buffer);
// first_mb_in_slice: ue(v)
slice_reader.ReadExponentialGolomb();
// slice_type: ue(v)
slice_reader.ReadExponentialGolomb();
// pic_parameter_set_id: ue(v)
uint32_t slice_pps_id = slice_reader.ReadExponentialGolomb();
if (!slice_reader.Ok()) {
return absl::nullopt;
}
return slice_pps_id;
}
absl::optional<PpsParser::PpsState> PpsParser::ParseInternal(
rtc::ArrayView<const uint8_t> buffer) {
BitstreamReader reader(buffer);
PpsState pps;
pps.id = reader.ReadExponentialGolomb();
pps.sps_id = reader.ReadExponentialGolomb();
// entropy_coding_mode_flag: u(1)
pps.entropy_coding_mode_flag = reader.Read<bool>();
// bottom_field_pic_order_in_frame_present_flag: u(1)
pps.bottom_field_pic_order_in_frame_present_flag = reader.Read<bool>();
// num_slice_groups_minus1: ue(v)
uint32_t num_slice_groups_minus1 = reader.ReadExponentialGolomb();
if (num_slice_groups_minus1 > 0) {
// slice_group_map_type: ue(v)
uint32_t slice_group_map_type = reader.ReadExponentialGolomb();
if (slice_group_map_type == 0) {
for (uint32_t i_group = 0;
i_group <= num_slice_groups_minus1 && reader.Ok(); ++i_group) {
// run_length_minus1[iGroup]: ue(v)
reader.ReadExponentialGolomb();
}
} else if (slice_group_map_type == 1) {
// TODO(sprang): Implement support for dispersed slice group map type.
// See 8.2.2.2 Specification for dispersed slice group map type.
} else if (slice_group_map_type == 2) {
for (uint32_t i_group = 0;
i_group <= num_slice_groups_minus1 && reader.Ok(); ++i_group) {
// top_left[iGroup]: ue(v)
reader.ReadExponentialGolomb();
// bottom_right[iGroup]: ue(v)
reader.ReadExponentialGolomb();
}
} else if (slice_group_map_type == 3 || slice_group_map_type == 4 ||
slice_group_map_type == 5) {
// slice_group_change_direction_flag: u(1)
reader.ConsumeBits(1);
// slice_group_change_rate_minus1: ue(v)
reader.ReadExponentialGolomb();
} else if (slice_group_map_type == 6) {
// pic_size_in_map_units_minus1: ue(v)
uint32_t pic_size_in_map_units = reader.ReadExponentialGolomb() + 1;
int slice_group_id_bits = 1 + absl::bit_width(num_slice_groups_minus1);
// slice_group_id: array of size pic_size_in_map_units, each element
// is represented by ceil(log2(num_slice_groups_minus1 + 1)) bits.
int64_t bits_to_consume =
int64_t{slice_group_id_bits} * pic_size_in_map_units;
if (!reader.Ok() || bits_to_consume > std::numeric_limits<int>::max()) {
return absl::nullopt;
}
reader.ConsumeBits(bits_to_consume);
}
}
// num_ref_idx_l0_default_active_minus1: ue(v)
reader.ReadExponentialGolomb();
// num_ref_idx_l1_default_active_minus1: ue(v)
reader.ReadExponentialGolomb();
// weighted_pred_flag: u(1)
pps.weighted_pred_flag = reader.Read<bool>();
// weighted_bipred_idc: u(2)
pps.weighted_bipred_idc = reader.ReadBits(2);
// pic_init_qp_minus26: se(v)
pps.pic_init_qp_minus26 = reader.ReadSignedExponentialGolomb();
// Sanity-check parsed value
if (!reader.Ok() || pps.pic_init_qp_minus26 > kMaxPicInitQpDeltaValue ||
pps.pic_init_qp_minus26 < kMinPicInitQpDeltaValue) {
return absl::nullopt;
}
// pic_init_qs_minus26: se(v)
reader.ReadExponentialGolomb();
// chroma_qp_index_offset: se(v)
reader.ReadExponentialGolomb();
// deblocking_filter_control_present_flag: u(1)
// constrained_intra_pred_flag: u(1)
reader.ConsumeBits(2);
// redundant_pic_cnt_present_flag: u(1)
pps.redundant_pic_cnt_present_flag = reader.ReadBit();
if (!reader.Ok()) {
return absl::nullopt;
}
return pps;
}
} // namespace webrtc


@@ -0,0 +1,60 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_VIDEO_H264_PPS_PARSER_H_
#define COMMON_VIDEO_H264_PPS_PARSER_H_
#include <stddef.h>
#include <stdint.h>
#include "absl/types/optional.h"
#include "api/array_view.h"
namespace webrtc {
// A class for parsing out picture parameter set (PPS) data from a H264 NALU.
class PpsParser {
public:
// The parsed state of the PPS. Only some select values are stored.
// Add more as they are actually needed.
struct PpsState {
PpsState() = default;
bool bottom_field_pic_order_in_frame_present_flag = false;
bool weighted_pred_flag = false;
bool entropy_coding_mode_flag = false;
uint32_t weighted_bipred_idc = 0;
uint32_t redundant_pic_cnt_present_flag = 0;
int pic_init_qp_minus26 = 0;
uint32_t id = 0;
uint32_t sps_id = 0;
};
// Unpack RBSP and parse PPS state from the supplied buffer.
static absl::optional<PpsState> ParsePps(const uint8_t* data, size_t length);
static bool ParsePpsIds(const uint8_t* data,
size_t length,
uint32_t* pps_id,
uint32_t* sps_id);
static absl::optional<uint32_t> ParsePpsIdFromSlice(const uint8_t* data,
size_t length);
protected:
// Parse the PPS state, for a buffer where RBSP decoding has already been
// performed.
static absl::optional<PpsState> ParseInternal(
rtc::ArrayView<const uint8_t> buffer);
};
} // namespace webrtc
#endif // COMMON_VIDEO_H264_PPS_PARSER_H_
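
As a usage sketch (the helper name PicInitQp is hypothetical), the parsed pic_init_qp_minus26 is the PPS half of the slice QP computation used by H264BitstreamParser::GetLastSliceQp:

#include "absl/types/optional.h"
#include "common_video/h264/pps_parser.h"

// `pps_payload` is the PPS NALU payload with the one-byte NALU header already
// stripped, as H264BitstreamParser::ParseSlice does before calling ParsePps.
absl::optional<int> PicInitQp(const uint8_t* pps_payload, size_t length) {
  absl::optional<webrtc::PpsParser::PpsState> pps =
      webrtc::PpsParser::ParsePps(pps_payload, length);
  if (!pps)
    return absl::nullopt;
  // The per-slice QP is later derived as this value plus slice_qp_delta.
  return 26 + pps->pic_init_qp_minus26;
}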


@@ -0,0 +1,227 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_video/h264/sps_parser.h"
#include <cstdint>
#include <vector>
#include "common_video/h264/h264_common.h"
#include "rtc_base/bitstream_reader.h"
namespace {
constexpr int kScalingDeltaMin = -128;
constexpr int kScaldingDeltaMax = 127;
} // namespace
namespace webrtc {
SpsParser::SpsState::SpsState() = default;
SpsParser::SpsState::SpsState(const SpsState&) = default;
SpsParser::SpsState::~SpsState() = default;
// General note: this is based off the 02/2014 version of the H.264 standard.
// You can find it on this page:
// http://www.itu.int/rec/T-REC-H.264
// Unpack RBSP and parse SPS state from the supplied buffer.
absl::optional<SpsParser::SpsState> SpsParser::ParseSps(const uint8_t* data,
size_t length) {
std::vector<uint8_t> unpacked_buffer = H264::ParseRbsp(data, length);
BitstreamReader reader(unpacked_buffer);
return ParseSpsUpToVui(reader);
}
absl::optional<SpsParser::SpsState> SpsParser::ParseSpsUpToVui(
BitstreamReader& reader) {
// Now, we need to use a bitstream reader to parse through the actual AVC SPS
// format. See Section 7.3.2.1.1 ("Sequence parameter set data syntax") of the
// H.264 standard for a complete description.
// Since we only care about resolution, we ignore the majority of fields, but
// we still have to actively parse through a lot of the data, since many of
// the fields have variable size.
// We're particularly interested in:
// chroma_format_idc -> affects crop units
// pic_{width,height}_* -> resolution of the frame in macroblocks (16x16).
// frame_crop_*_offset -> crop information
SpsState sps;
// chroma_format_idc will be ChromaArrayType if separate_colour_plane_flag is
// 0. It defaults to 1 when not specified.
uint32_t chroma_format_idc = 1;
// profile_idc: u(8). We need it to determine if we need to read/skip chroma
// formats.
uint8_t profile_idc = reader.Read<uint8_t>();
// constraint_set0_flag through constraint_set5_flag + reserved_zero_2bits
// 1 bit each for the flags + 2 bits + 8 bits for level_idc = 16 bits.
reader.ConsumeBits(16);
// seq_parameter_set_id: ue(v)
sps.id = reader.ReadExponentialGolomb();
sps.separate_colour_plane_flag = 0;
// See if profile_idc has chroma format information.
if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 ||
profile_idc == 244 || profile_idc == 44 || profile_idc == 83 ||
profile_idc == 86 || profile_idc == 118 || profile_idc == 128 ||
profile_idc == 138 || profile_idc == 139 || profile_idc == 134) {
// chroma_format_idc: ue(v)
chroma_format_idc = reader.ReadExponentialGolomb();
if (chroma_format_idc == 3) {
// separate_colour_plane_flag: u(1)
sps.separate_colour_plane_flag = reader.ReadBit();
}
// bit_depth_luma_minus8: ue(v)
reader.ReadExponentialGolomb();
// bit_depth_chroma_minus8: ue(v)
reader.ReadExponentialGolomb();
// qpprime_y_zero_transform_bypass_flag: u(1)
reader.ConsumeBits(1);
// seq_scaling_matrix_present_flag: u(1)
if (reader.Read<bool>()) {
// Process the scaling lists just enough to be able to properly
// skip over them, so we can still read the resolution on streams
// where this is included.
int scaling_list_count = (chroma_format_idc == 3 ? 12 : 8);
for (int i = 0; i < scaling_list_count; ++i) {
// seq_scaling_list_present_flag[i] : u(1)
if (reader.Read<bool>()) {
int last_scale = 8;
int next_scale = 8;
int size_of_scaling_list = i < 6 ? 16 : 64;
for (int j = 0; j < size_of_scaling_list; j++) {
if (next_scale != 0) {
// delta_scale: se(v)
int delta_scale = reader.ReadSignedExponentialGolomb();
if (!reader.Ok() || delta_scale < kScalingDeltaMin ||
delta_scale > kScaldingDeltaMax) {
return absl::nullopt;
}
next_scale = (last_scale + delta_scale + 256) % 256;
}
if (next_scale != 0)
last_scale = next_scale;
}
}
}
}
}
// log2_max_frame_num and log2_max_pic_order_cnt_lsb are used with
// BitstreamReader::ReadBits, which can read at most 64 bits at a time. We
// also have to avoid overflow when adding 4 to the on-wire golomb value,
// e.g., for evil input data, ReadExponentialGolomb might return 0xfffffffc.
const uint32_t kMaxLog2Minus4 = 12;
// log2_max_frame_num_minus4: ue(v)
uint32_t log2_max_frame_num_minus4 = reader.ReadExponentialGolomb();
if (!reader.Ok() || log2_max_frame_num_minus4 > kMaxLog2Minus4) {
return absl::nullopt;
}
sps.log2_max_frame_num = log2_max_frame_num_minus4 + 4;
// pic_order_cnt_type: ue(v)
sps.pic_order_cnt_type = reader.ReadExponentialGolomb();
if (sps.pic_order_cnt_type == 0) {
// log2_max_pic_order_cnt_lsb_minus4: ue(v)
uint32_t log2_max_pic_order_cnt_lsb_minus4 = reader.ReadExponentialGolomb();
if (!reader.Ok() || log2_max_pic_order_cnt_lsb_minus4 > kMaxLog2Minus4) {
return absl::nullopt;
}
sps.log2_max_pic_order_cnt_lsb = log2_max_pic_order_cnt_lsb_minus4 + 4;
} else if (sps.pic_order_cnt_type == 1) {
// delta_pic_order_always_zero_flag: u(1)
sps.delta_pic_order_always_zero_flag = reader.ReadBit();
// offset_for_non_ref_pic: se(v)
reader.ReadExponentialGolomb();
// offset_for_top_to_bottom_field: se(v)
reader.ReadExponentialGolomb();
// num_ref_frames_in_pic_order_cnt_cycle: ue(v)
uint32_t num_ref_frames_in_pic_order_cnt_cycle =
reader.ReadExponentialGolomb();
for (size_t i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i) {
// offset_for_ref_frame[i]: se(v)
reader.ReadExponentialGolomb();
if (!reader.Ok()) {
return absl::nullopt;
}
}
}
// max_num_ref_frames: ue(v)
sps.max_num_ref_frames = reader.ReadExponentialGolomb();
// gaps_in_frame_num_value_allowed_flag: u(1)
reader.ConsumeBits(1);
//
// IMPORTANT ONES! Now we're getting to resolution. First we read the pic
// width/height in macroblocks (16x16), which gives us the base resolution,
// and then we continue on until we hit the frame crop offsets, which are used
// to signify resolutions that aren't multiples of 16.
//
// pic_width_in_mbs_minus1: ue(v)
sps.width = 16 * (reader.ReadExponentialGolomb() + 1);
// pic_height_in_map_units_minus1: ue(v)
uint32_t pic_height_in_map_units_minus1 = reader.ReadExponentialGolomb();
// frame_mbs_only_flag: u(1)
sps.frame_mbs_only_flag = reader.ReadBit();
if (!sps.frame_mbs_only_flag) {
// mb_adaptive_frame_field_flag: u(1)
reader.ConsumeBits(1);
}
sps.height =
16 * (2 - sps.frame_mbs_only_flag) * (pic_height_in_map_units_minus1 + 1);
// direct_8x8_inference_flag: u(1)
reader.ConsumeBits(1);
//
// MORE IMPORTANT ONES! Now we're at the frame crop information.
//
uint32_t frame_crop_left_offset = 0;
uint32_t frame_crop_right_offset = 0;
uint32_t frame_crop_top_offset = 0;
uint32_t frame_crop_bottom_offset = 0;
// frame_cropping_flag: u(1)
if (reader.Read<bool>()) {
// frame_crop_{left, right, top, bottom}_offset: ue(v)
frame_crop_left_offset = reader.ReadExponentialGolomb();
frame_crop_right_offset = reader.ReadExponentialGolomb();
frame_crop_top_offset = reader.ReadExponentialGolomb();
frame_crop_bottom_offset = reader.ReadExponentialGolomb();
}
// vui_parameters_present_flag: u(1)
sps.vui_params_present = reader.ReadBit();
// Far enough! We don't use the rest of the SPS.
if (!reader.Ok()) {
return absl::nullopt;
}
// Figure out the crop units in pixels. That's based on the chroma format's
// sampling, which is indicated by chroma_format_idc.
if (sps.separate_colour_plane_flag || chroma_format_idc == 0) {
frame_crop_bottom_offset *= (2 - sps.frame_mbs_only_flag);
frame_crop_top_offset *= (2 - sps.frame_mbs_only_flag);
} else if (!sps.separate_colour_plane_flag && chroma_format_idc > 0) {
// Width multipliers for formats 1 (4:2:0) and 2 (4:2:2).
if (chroma_format_idc == 1 || chroma_format_idc == 2) {
frame_crop_left_offset *= 2;
frame_crop_right_offset *= 2;
}
// Height multipliers for format 1 (4:2:0).
if (chroma_format_idc == 1) {
frame_crop_top_offset *= 2;
frame_crop_bottom_offset *= 2;
}
}
// Subtract the crop for each dimension.
sps.width -= (frame_crop_left_offset + frame_crop_right_offset);
sps.height -= (frame_crop_top_offset + frame_crop_bottom_offset);
return sps;
}
} // namespace webrtc
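
As a worked check of the resolution math above, assume a 4:2:0 stream (chroma_format_idc = 1) with frame_mbs_only_flag = 1 that encodes 1920x1080; the values below are hypothetical but consistent with the code:

  pic_width_in_mbs_minus1        = 119 -> width  = 16 * (119 + 1)    = 1920
  pic_height_in_map_units_minus1 =  67 -> height = 16 * 1 * (67 + 1) = 1088
  frame_crop_bottom_offset       =   4 -> crop   = 2 * 4             =    8
  reported size: width = 1920, height = 1088 - 8 = 1080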


@@ -0,0 +1,53 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_VIDEO_H264_SPS_PARSER_H_
#define COMMON_VIDEO_H264_SPS_PARSER_H_
#include "absl/types/optional.h"
#include "rtc_base/bitstream_reader.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
// A class for parsing out sequence parameter set (SPS) data from an H264 NALU.
class RTC_EXPORT SpsParser {
public:
// The parsed state of the SPS. Only some select values are stored.
// Add more as they are actually needed.
struct RTC_EXPORT SpsState {
SpsState();
SpsState(const SpsState&);
~SpsState();
uint32_t width = 0;
uint32_t height = 0;
uint32_t delta_pic_order_always_zero_flag = 0;
uint32_t separate_colour_plane_flag = 0;
uint32_t frame_mbs_only_flag = 0;
uint32_t log2_max_frame_num = 4; // Smallest valid value.
uint32_t log2_max_pic_order_cnt_lsb = 4; // Smallest valid value.
uint32_t pic_order_cnt_type = 0;
uint32_t max_num_ref_frames = 0;
uint32_t vui_params_present = 0;
uint32_t id = 0;
};
// Unpack RBSP and parse SPS state from the supplied buffer.
static absl::optional<SpsState> ParseSps(const uint8_t* data, size_t length);
protected:
// Parse the SPS state, up till the VUI part, for a buffer where RBSP
// decoding has already been performed.
static absl::optional<SpsState> ParseSpsUpToVui(BitstreamReader& reader);
};
} // namespace webrtc
#endif // COMMON_VIDEO_H264_SPS_PARSER_H_
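
A minimal usage sketch of SpsParser (the helper name GetResolution is hypothetical); the payload is the SPS NALU with its one-byte header removed, as in H264BitstreamParser::ParseSlice:

#include "absl/types/optional.h"
#include "common_video/h264/sps_parser.h"

bool GetResolution(const uint8_t* sps_payload, size_t length,
                   uint32_t* width, uint32_t* height) {
  absl::optional<webrtc::SpsParser::SpsState> sps =
      webrtc::SpsParser::ParseSps(sps_payload, length);
  if (!sps)
    return false;
  *width = sps->width;    // Frame cropping is already applied.
  *height = sps->height;
  return true;
}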


@@ -0,0 +1,611 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*
*/
#include "common_video/h264/sps_vui_rewriter.h"
#include <string.h>
#include <algorithm>
#include <cstdint>
#include <vector>
#include "api/video/color_space.h"
#include "common_video/h264/h264_common.h"
#include "common_video/h264/sps_parser.h"
#include "rtc_base/bit_buffer.h"
#include "rtc_base/bitstream_reader.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "system_wrappers/include/metrics.h"
namespace webrtc {
namespace {
// The maximum expected growth from adding a VUI to the SPS. It's actually
// closer to 24 or so, but better safe than sorry.
const size_t kMaxVuiSpsIncrease = 64;
const char* kSpsValidHistogramName = "WebRTC.Video.H264.SpsValid";
enum SpsValidEvent {
kReceivedSpsVuiOk = 1,
kReceivedSpsRewritten = 2,
kReceivedSpsParseFailure = 3,
kSentSpsPocOk = 4,
kSentSpsVuiOk = 5,
kSentSpsRewritten = 6,
kSentSpsParseFailure = 7,
kSpsRewrittenMax = 8
};
#define RETURN_FALSE_ON_FAIL(x) \
do { \
if (!(x)) { \
RTC_LOG_F(LS_ERROR) << " (line:" << __LINE__ << ") FAILED: " #x; \
return false; \
} \
} while (0)
uint8_t CopyUInt8(BitstreamReader& source, rtc::BitBufferWriter& destination) {
uint8_t tmp = source.Read<uint8_t>();
if (!destination.WriteUInt8(tmp)) {
source.Invalidate();
}
return tmp;
}
uint32_t CopyExpGolomb(BitstreamReader& source,
rtc::BitBufferWriter& destination) {
uint32_t tmp = source.ReadExponentialGolomb();
if (!destination.WriteExponentialGolomb(tmp)) {
source.Invalidate();
}
return tmp;
}
uint32_t CopyBits(int bits,
BitstreamReader& source,
rtc::BitBufferWriter& destination) {
RTC_DCHECK_GT(bits, 0);
RTC_DCHECK_LE(bits, 32);
uint64_t tmp = source.ReadBits(bits);
if (!destination.WriteBits(tmp, bits)) {
source.Invalidate();
}
return tmp;
}
bool CopyAndRewriteVui(const SpsParser::SpsState& sps,
BitstreamReader& source,
rtc::BitBufferWriter& destination,
const webrtc::ColorSpace* color_space,
SpsVuiRewriter::ParseResult& out_vui_rewritten);
void CopyHrdParameters(BitstreamReader& source,
rtc::BitBufferWriter& destination);
bool AddBitstreamRestriction(rtc::BitBufferWriter* destination,
uint32_t max_num_ref_frames);
bool IsDefaultColorSpace(const ColorSpace& color_space);
bool AddVideoSignalTypeInfo(rtc::BitBufferWriter& destination,
const ColorSpace& color_space);
bool CopyOrRewriteVideoSignalTypeInfo(
BitstreamReader& source,
rtc::BitBufferWriter& destination,
const ColorSpace* color_space,
SpsVuiRewriter::ParseResult& out_vui_rewritten);
bool CopyRemainingBits(BitstreamReader& source,
rtc::BitBufferWriter& destination);
} // namespace
void SpsVuiRewriter::UpdateStats(ParseResult result, Direction direction) {
switch (result) {
case SpsVuiRewriter::ParseResult::kVuiRewritten:
RTC_HISTOGRAM_ENUMERATION(
kSpsValidHistogramName,
direction == SpsVuiRewriter::Direction::kIncoming
? SpsValidEvent::kReceivedSpsRewritten
: SpsValidEvent::kSentSpsRewritten,
SpsValidEvent::kSpsRewrittenMax);
break;
case SpsVuiRewriter::ParseResult::kVuiOk:
RTC_HISTOGRAM_ENUMERATION(
kSpsValidHistogramName,
direction == SpsVuiRewriter::Direction::kIncoming
? SpsValidEvent::kReceivedSpsVuiOk
: SpsValidEvent::kSentSpsVuiOk,
SpsValidEvent::kSpsRewrittenMax);
break;
case SpsVuiRewriter::ParseResult::kFailure:
RTC_HISTOGRAM_ENUMERATION(
kSpsValidHistogramName,
direction == SpsVuiRewriter::Direction::kIncoming
? SpsValidEvent::kReceivedSpsParseFailure
: SpsValidEvent::kSentSpsParseFailure,
SpsValidEvent::kSpsRewrittenMax);
break;
}
}
SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
const uint8_t* buffer,
size_t length,
absl::optional<SpsParser::SpsState>* sps,
const webrtc::ColorSpace* color_space,
rtc::Buffer* destination) {
// Create a temporary RBSP-decoded buffer of the payload, excluding the
// leading NALU type header byte (the SpsParser uses only the payload).
std::vector<uint8_t> rbsp_buffer = H264::ParseRbsp(buffer, length);
BitstreamReader source_buffer(rbsp_buffer);
absl::optional<SpsParser::SpsState> sps_state =
SpsParser::ParseSpsUpToVui(source_buffer);
if (!sps_state)
return ParseResult::kFailure;
*sps = sps_state;
// We're going to completely muck up alignment, so we need a BitBufferWriter
// to write with.
rtc::Buffer out_buffer(length + kMaxVuiSpsIncrease);
rtc::BitBufferWriter sps_writer(out_buffer.data(), out_buffer.size());
// Check how far the SpsParser has read, and copy that data in bulk.
RTC_DCHECK(source_buffer.Ok());
size_t total_bit_offset =
rbsp_buffer.size() * 8 - source_buffer.RemainingBitCount();
size_t byte_offset = total_bit_offset / 8;
size_t bit_offset = total_bit_offset % 8;
memcpy(out_buffer.data(), rbsp_buffer.data(),
byte_offset + (bit_offset > 0 ? 1 : 0)); // OK to copy the last bits.
// SpsParser will have read the vui_params_present flag, which we want to
// modify, so back off a bit.
if (bit_offset == 0) {
--byte_offset;
bit_offset = 7;
} else {
--bit_offset;
}
sps_writer.Seek(byte_offset, bit_offset);
ParseResult vui_updated;
if (!CopyAndRewriteVui(*sps_state, source_buffer, sps_writer, color_space,
vui_updated)) {
RTC_LOG(LS_ERROR) << "Failed to parse/copy SPS VUI.";
return ParseResult::kFailure;
}
if (vui_updated == ParseResult::kVuiOk) {
// No update necessary after all, just return.
return vui_updated;
}
if (!CopyRemainingBits(source_buffer, sps_writer)) {
RTC_LOG(LS_ERROR) << "Failed to parse/copy SPS VUI.";
return ParseResult::kFailure;
}
// Pad up to next byte with zero bits.
sps_writer.GetCurrentOffset(&byte_offset, &bit_offset);
if (bit_offset > 0) {
sps_writer.WriteBits(0, 8 - bit_offset);
++byte_offset;
bit_offset = 0;
}
RTC_DCHECK(byte_offset <= length + kMaxVuiSpsIncrease);
RTC_CHECK(destination != nullptr);
out_buffer.SetSize(byte_offset);
// Write the updated SPS to destination, re-adding RBSP emulation bytes.
H264::WriteRbsp(out_buffer.data(), out_buffer.size(), destination);
return ParseResult::kVuiRewritten;
}
SpsVuiRewriter::ParseResult SpsVuiRewriter::ParseAndRewriteSps(
const uint8_t* buffer,
size_t length,
absl::optional<SpsParser::SpsState>* sps,
const webrtc::ColorSpace* color_space,
rtc::Buffer* destination,
Direction direction) {
ParseResult result =
ParseAndRewriteSps(buffer, length, sps, color_space, destination);
UpdateStats(result, direction);
return result;
}
rtc::Buffer SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(
rtc::ArrayView<const uint8_t> buffer,
const webrtc::ColorSpace* color_space) {
std::vector<H264::NaluIndex> nalus =
H264::FindNaluIndices(buffer.data(), buffer.size());
// Allocate some extra space for potentially adding a missing VUI.
rtc::Buffer output_buffer(/*size=*/0, /*capacity=*/buffer.size() +
nalus.size() * kMaxVuiSpsIncrease);
for (const H264::NaluIndex& nalu : nalus) {
// Copy NAL unit start code.
const uint8_t* start_code_ptr = buffer.data() + nalu.start_offset;
const size_t start_code_length =
nalu.payload_start_offset - nalu.start_offset;
const uint8_t* nalu_ptr = buffer.data() + nalu.payload_start_offset;
const size_t nalu_length = nalu.payload_size;
if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kSps) {
// Check if stream uses picture order count type 0, and if so rewrite it
// to enable faster decoding. Streams in that format incur additional
// delay because it allows decode order to differ from render order.
// The mechanism used is to rewrite (edit or add) the SPS's VUI to contain
// restrictions on the maximum number of reordered pictures. This reduces
// latency significantly, though it still adds about a frame of latency to
// decoding.
// Note that we do this rewriting both here (send side, in order to
// protect legacy receive clients) and in RtpDepacketizerH264::ParseSingleNalu
// (receive side, in order to protect us from unknown or legacy send
// clients).
absl::optional<SpsParser::SpsState> sps;
rtc::Buffer output_nalu;
// Add the type header to the output buffer first, so that the rewriter
// can append modified payload on top of that.
output_nalu.AppendData(nalu_ptr[0]);
ParseResult result = ParseAndRewriteSps(
nalu_ptr + H264::kNaluTypeSize, nalu_length - H264::kNaluTypeSize,
&sps, color_space, &output_nalu, Direction::kOutgoing);
if (result == ParseResult::kVuiRewritten) {
output_buffer.AppendData(start_code_ptr, start_code_length);
output_buffer.AppendData(output_nalu.data(), output_nalu.size());
continue;
}
} else if (H264::ParseNaluType(nalu_ptr[0]) == H264::NaluType::kAud) {
// Skip the access unit delimiter copy.
continue;
}
// The VUI wasn't rewritten and this is not an AUD; copy the NAL unit as is.
output_buffer.AppendData(start_code_ptr, start_code_length);
output_buffer.AppendData(nalu_ptr, nalu_length);
}
return output_buffer;
}
namespace {
bool CopyAndRewriteVui(const SpsParser::SpsState& sps,
BitstreamReader& source,
rtc::BitBufferWriter& destination,
const webrtc::ColorSpace* color_space,
SpsVuiRewriter::ParseResult& out_vui_rewritten) {
out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiOk;
//
// vui_parameters_present_flag: u(1)
//
RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
// ********* IMPORTANT! **********
// Now we're at the VUI, so we want to (1) add it if it isn't present, and
// (2) rewrite frame reordering values so no reordering is allowed.
if (!sps.vui_params_present) {
// Write a simple VUI with the parameters we want and 0 for all other flags.
// aspect_ratio_info_present_flag, overscan_info_present_flag. Both u(1).
RETURN_FALSE_ON_FAIL(destination.WriteBits(0, 2));
uint32_t video_signal_type_present_flag =
(color_space && !IsDefaultColorSpace(*color_space)) ? 1 : 0;
RETURN_FALSE_ON_FAIL(
destination.WriteBits(video_signal_type_present_flag, 1));
if (video_signal_type_present_flag) {
RETURN_FALSE_ON_FAIL(AddVideoSignalTypeInfo(destination, *color_space));
}
// chroma_loc_info_present_flag, timing_info_present_flag,
// nal_hrd_parameters_present_flag, vcl_hrd_parameters_present_flag,
// pic_struct_present_flag, All u(1)
RETURN_FALSE_ON_FAIL(destination.WriteBits(0, 5));
// bitstream_restriction_flag: u(1)
RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
RETURN_FALSE_ON_FAIL(
AddBitstreamRestriction(&destination, sps.max_num_ref_frames));
out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
} else {
// Parse out the full VUI.
// aspect_ratio_info_present_flag: u(1)
uint32_t aspect_ratio_info_present_flag = CopyBits(1, source, destination);
if (aspect_ratio_info_present_flag) {
// aspect_ratio_idc: u(8)
uint8_t aspect_ratio_idc = CopyUInt8(source, destination);
if (aspect_ratio_idc == 255u) { // Extended_SAR
// sar_width/sar_height: u(16) each.
CopyBits(32, source, destination);
}
}
// overscan_info_present_flag: u(1)
uint32_t overscan_info_present_flag = CopyBits(1, source, destination);
if (overscan_info_present_flag) {
// overscan_appropriate_flag: u(1)
CopyBits(1, source, destination);
}
CopyOrRewriteVideoSignalTypeInfo(source, destination, color_space,
out_vui_rewritten);
// chroma_loc_info_present_flag: u(1)
uint32_t chroma_loc_info_present_flag = CopyBits(1, source, destination);
if (chroma_loc_info_present_flag == 1) {
// chroma_sample_loc_type_(top|bottom)_field: ue(v) each.
CopyExpGolomb(source, destination);
CopyExpGolomb(source, destination);
}
// timing_info_present_flag: u(1)
uint32_t timing_info_present_flag = CopyBits(1, source, destination);
if (timing_info_present_flag == 1) {
// num_units_in_tick, time_scale: u(32) each
CopyBits(32, source, destination);
CopyBits(32, source, destination);
// fixed_frame_rate_flag: u(1)
CopyBits(1, source, destination);
}
// nal_hrd_parameters_present_flag: u(1)
uint32_t nal_hrd_parameters_present_flag = CopyBits(1, source, destination);
if (nal_hrd_parameters_present_flag == 1) {
CopyHrdParameters(source, destination);
}
// vcl_hrd_parameters_present_flag: u(1)
uint32_t vcl_hrd_parameters_present_flag = CopyBits(1, source, destination);
if (vcl_hrd_parameters_present_flag == 1) {
CopyHrdParameters(source, destination);
}
if (nal_hrd_parameters_present_flag == 1 ||
vcl_hrd_parameters_present_flag == 1) {
// low_delay_hrd_flag: u(1)
CopyBits(1, source, destination);
}
// pic_struct_present_flag: u(1)
CopyBits(1, source, destination);
// bitstream_restriction_flag: u(1)
uint32_t bitstream_restriction_flag = source.ReadBit();
RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
if (bitstream_restriction_flag == 0) {
// We're adding one from scratch.
RETURN_FALSE_ON_FAIL(
AddBitstreamRestriction(&destination, sps.max_num_ref_frames));
out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
} else {
// We're replacing.
// motion_vectors_over_pic_boundaries_flag: u(1)
CopyBits(1, source, destination);
// max_bytes_per_pic_denom: ue(v)
CopyExpGolomb(source, destination);
// max_bits_per_mb_denom: ue(v)
CopyExpGolomb(source, destination);
// log2_max_mv_length_horizontal: ue(v)
CopyExpGolomb(source, destination);
// log2_max_mv_length_vertical: ue(v)
CopyExpGolomb(source, destination);
// ********* IMPORTANT! **********
// The next two are the ones we need to set to low numbers:
// max_num_reorder_frames: ue(v)
// max_dec_frame_buffering: ue(v)
// However, if they are already set to no greater than the numbers we
// want, then we don't need to be rewriting.
uint32_t max_num_reorder_frames = source.ReadExponentialGolomb();
uint32_t max_dec_frame_buffering = source.ReadExponentialGolomb();
RETURN_FALSE_ON_FAIL(destination.WriteExponentialGolomb(0));
RETURN_FALSE_ON_FAIL(
destination.WriteExponentialGolomb(sps.max_num_ref_frames));
if (max_num_reorder_frames != 0 ||
max_dec_frame_buffering > sps.max_num_ref_frames) {
out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
}
}
}
return source.Ok();
}
// Copies a VUI HRD parameters segment.
void CopyHrdParameters(BitstreamReader& source,
rtc::BitBufferWriter& destination) {
// cpb_cnt_minus1: ue(v)
uint32_t cpb_cnt_minus1 = CopyExpGolomb(source, destination);
// bit_rate_scale and cpb_size_scale: u(4) each
CopyBits(8, source, destination);
for (size_t i = 0; source.Ok() && i <= cpb_cnt_minus1; ++i) {
// bit_rate_value_minus1 and cpb_size_value_minus1: ue(v) each
CopyExpGolomb(source, destination);
CopyExpGolomb(source, destination);
// cbr_flag: u(1)
CopyBits(1, source, destination);
}
// initial_cpb_removal_delay_length_minus1: u(5)
// cpb_removal_delay_length_minus1: u(5)
// dpb_output_delay_length_minus1: u(5)
// time_offset_length: u(5)
CopyBits(5 * 4, source, destination);
}
// These functions are similar to webrtc::SpsParser::ParseSpsUpToVui, and based on the
// same version of the H.264 standard. You can find it here:
// http://www.itu.int/rec/T-REC-H.264
// Adds a bitstream restriction VUI segment.
bool AddBitstreamRestriction(rtc::BitBufferWriter* destination,
uint32_t max_num_ref_frames) {
// motion_vectors_over_pic_boundaries_flag: u(1)
// Default is 1 when not present.
RETURN_FALSE_ON_FAIL(destination->WriteBits(1, 1));
// max_bytes_per_pic_denom: ue(v)
// Default is 2 when not present.
RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(2));
// max_bits_per_mb_denom: ue(v)
// Default is 1 when not present.
RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(1));
// log2_max_mv_length_horizontal: ue(v)
// log2_max_mv_length_vertical: ue(v)
// Both default to 16 when not present.
RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(16));
RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(16));
// ********* IMPORTANT! **********
// max_num_reorder_frames: ue(v)
RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(0));
// max_dec_frame_buffering: ue(v)
RETURN_FALSE_ON_FAIL(destination->WriteExponentialGolomb(max_num_ref_frames));
return true;
}
bool IsDefaultColorSpace(const ColorSpace& color_space) {
return color_space.range() != ColorSpace::RangeID::kFull &&
color_space.primaries() == ColorSpace::PrimaryID::kUnspecified &&
color_space.transfer() == ColorSpace::TransferID::kUnspecified &&
color_space.matrix() == ColorSpace::MatrixID::kUnspecified;
}
bool AddVideoSignalTypeInfo(rtc::BitBufferWriter& destination,
const ColorSpace& color_space) {
// video_format: u(3).
RETURN_FALSE_ON_FAIL(destination.WriteBits(5, 3)); // 5 = Unspecified
// video_full_range_flag: u(1)
RETURN_FALSE_ON_FAIL(destination.WriteBits(
color_space.range() == ColorSpace::RangeID::kFull ? 1 : 0, 1));
// colour_description_present_flag: u(1)
RETURN_FALSE_ON_FAIL(destination.WriteBits(1, 1));
// colour_primaries: u(8)
RETURN_FALSE_ON_FAIL(
destination.WriteUInt8(static_cast<uint8_t>(color_space.primaries())));
// transfer_characteristics: u(8)
RETURN_FALSE_ON_FAIL(
destination.WriteUInt8(static_cast<uint8_t>(color_space.transfer())));
// matrix_coefficients: u(8)
RETURN_FALSE_ON_FAIL(
destination.WriteUInt8(static_cast<uint8_t>(color_space.matrix())));
return true;
}
bool CopyOrRewriteVideoSignalTypeInfo(
BitstreamReader& source,
rtc::BitBufferWriter& destination,
const ColorSpace* color_space,
SpsVuiRewriter::ParseResult& out_vui_rewritten) {
// Read.
uint32_t video_format = 5; // H264 default: unspecified
uint32_t video_full_range_flag = 0; // H264 default: limited
uint32_t colour_description_present_flag = 0;
uint8_t colour_primaries = 3; // H264 default: unspecified
uint8_t transfer_characteristics = 3; // H264 default: unspecified
uint8_t matrix_coefficients = 3; // H264 default: unspecified
uint32_t video_signal_type_present_flag = source.ReadBit();
if (video_signal_type_present_flag) {
video_format = source.ReadBits(3);
video_full_range_flag = source.ReadBit();
colour_description_present_flag = source.ReadBit();
if (colour_description_present_flag) {
colour_primaries = source.Read<uint8_t>();
transfer_characteristics = source.Read<uint8_t>();
matrix_coefficients = source.Read<uint8_t>();
}
}
RETURN_FALSE_ON_FAIL(source.Ok());
// Update.
uint32_t video_signal_type_present_flag_override =
video_signal_type_present_flag;
uint32_t video_format_override = video_format;
uint32_t video_full_range_flag_override = video_full_range_flag;
uint32_t colour_description_present_flag_override =
colour_description_present_flag;
uint8_t colour_primaries_override = colour_primaries;
uint8_t transfer_characteristics_override = transfer_characteristics;
uint8_t matrix_coefficients_override = matrix_coefficients;
if (color_space) {
if (IsDefaultColorSpace(*color_space)) {
video_signal_type_present_flag_override = 0;
} else {
video_signal_type_present_flag_override = 1;
video_format_override = 5; // unspecified
if (color_space->range() == ColorSpace::RangeID::kFull) {
video_full_range_flag_override = 1;
} else {
// ColorSpace::RangeID::kInvalid and kDerived are treated as limited.
video_full_range_flag_override = 0;
}
colour_description_present_flag_override =
color_space->primaries() != ColorSpace::PrimaryID::kUnspecified ||
color_space->transfer() != ColorSpace::TransferID::kUnspecified ||
color_space->matrix() != ColorSpace::MatrixID::kUnspecified;
colour_primaries_override =
static_cast<uint8_t>(color_space->primaries());
transfer_characteristics_override =
static_cast<uint8_t>(color_space->transfer());
matrix_coefficients_override =
static_cast<uint8_t>(color_space->matrix());
}
}
// Write.
RETURN_FALSE_ON_FAIL(
destination.WriteBits(video_signal_type_present_flag_override, 1));
if (video_signal_type_present_flag_override) {
RETURN_FALSE_ON_FAIL(destination.WriteBits(video_format_override, 3));
RETURN_FALSE_ON_FAIL(
destination.WriteBits(video_full_range_flag_override, 1));
RETURN_FALSE_ON_FAIL(
destination.WriteBits(colour_description_present_flag_override, 1));
if (colour_description_present_flag_override) {
RETURN_FALSE_ON_FAIL(destination.WriteUInt8(colour_primaries_override));
RETURN_FALSE_ON_FAIL(
destination.WriteUInt8(transfer_characteristics_override));
RETURN_FALSE_ON_FAIL(
destination.WriteUInt8(matrix_coefficients_override));
}
}
if (video_signal_type_present_flag_override !=
video_signal_type_present_flag ||
video_format_override != video_format ||
video_full_range_flag_override != video_full_range_flag ||
colour_description_present_flag_override !=
colour_description_present_flag ||
colour_primaries_override != colour_primaries ||
transfer_characteristics_override != transfer_characteristics ||
matrix_coefficients_override != matrix_coefficients) {
out_vui_rewritten = SpsVuiRewriter::ParseResult::kVuiRewritten;
}
return true;
}
bool CopyRemainingBits(BitstreamReader& source,
rtc::BitBufferWriter& destination) {
// Try to get at least the destination aligned.
if (source.RemainingBitCount() > 0 && source.RemainingBitCount() % 8 != 0) {
size_t misaligned_bits = source.RemainingBitCount() % 8;
CopyBits(misaligned_bits, source, destination);
}
while (source.RemainingBitCount() > 0) {
int count = std::min(32, source.RemainingBitCount());
CopyBits(count, source, destination);
}
// TODO(noahric): The last byte could be all zeroes now, which we should just
// strip.
return source.Ok();
}
} // namespace
} // namespace webrtc


@@ -0,0 +1,72 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*
*/
#ifndef COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_
#define COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_
#include <stddef.h>
#include <stdint.h>
#include "absl/types/optional.h"
#include "api/video/color_space.h"
#include "common_video/h264/sps_parser.h"
#include "rtc_base/buffer.h"
namespace webrtc {
// A class that can parse an SPS+VUI and if necessary creates a copy with
// updated parameters.
// The rewriter disables frame buffering. This should force decoders to deliver
// decoded frames immediately and, thus, reduce latency.
// The rewriter updates video signal type parameters if external parameters are
// provided.
class SpsVuiRewriter : private SpsParser {
public:
enum class ParseResult { kFailure, kVuiOk, kVuiRewritten };
enum class Direction { kIncoming, kOutgoing };
// Parses an SPS block and if necessary copies it and rewrites the VUI.
// Returns kFailure on failure, kVuiOk if parsing succeeded and no update
// was necessary, and kVuiRewritten if an updated copy of buffer was
// written to destination. destination may be populated with some data even if
// no rewrite was necessary, but the end offset should remain unchanged.
// Unless parsing fails, the sps parameter will be populated with the parsed
// SPS state. This function assumes that any previous headers
// (NALU start, type, Stap-A, etc) have already been parsed and that RBSP
// decoding has been performed.
static ParseResult ParseAndRewriteSps(
const uint8_t* buffer,
size_t length,
absl::optional<SpsParser::SpsState>* sps,
const ColorSpace* color_space,
rtc::Buffer* destination,
Direction direction);
// Parses NAL units from `buffer`, strips AUD blocks and rewrites VUI in SPS
// blocks if necessary.
static rtc::Buffer ParseOutgoingBitstreamAndRewrite(
rtc::ArrayView<const uint8_t> buffer,
const ColorSpace* color_space);
private:
static ParseResult ParseAndRewriteSps(
const uint8_t* buffer,
size_t length,
absl::optional<SpsParser::SpsState>* sps,
const ColorSpace* color_space,
rtc::Buffer* destination);
static void UpdateStats(ParseResult result, Direction direction);
};
} // namespace webrtc
#endif // COMMON_VIDEO_H264_SPS_VUI_REWRITER_H_
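
A usage sketch of the rewriter for the outgoing path (function name and arguments are illustrative; color_space may be nullptr when no color information should be signaled):

#include "api/array_view.h"
#include "common_video/h264/sps_vui_rewriter.h"

rtc::Buffer RewriteForSending(rtc::ArrayView<const uint8_t> annex_b_frame,
                              const webrtc::ColorSpace* color_space) {
  // AUD NAL units are dropped, SPS VUIs are added or updated so that no frame
  // reordering is allowed, and all other NAL units are copied unchanged.
  return webrtc::SpsVuiRewriter::ParseOutgoingBitstreamAndRewrite(
      annex_b_frame, color_space);
}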