Update to current webrtc library
This is from the upstream library commit id 3326535126e435f1ba647885ce43a8f0f3d317eb, corresponding to Chromium 88.0.4290.1.
webrtc/api/video/color_space.cc (new file, 187 lines)
@@ -0,0 +1,187 @@
/*
 * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "api/video/color_space.h"

namespace webrtc {
namespace {
// Try to convert |enum_value| into the enum class T. |enum_bitmask| is created
// by the function below. Returns true if conversion was successful, false
// otherwise.
template <typename T>
bool SetFromUint8(uint8_t enum_value, uint64_t enum_bitmask, T* out) {
  if ((enum_value < 64) && ((enum_bitmask >> enum_value) & 1)) {
    *out = static_cast<T>(enum_value);
    return true;
  }
  return false;
}

// This function serves as an assert for the constexpr function below. It's on
// purpose not declared as constexpr so that it causes a build problem if enum
// values of 64 or above are used. The bitmask and the code generating it would
// have to be extended if the standard is updated to include enum values >= 64.
int EnumMustBeLessThan64() {
  return -1;
}

template <typename T, size_t N>
constexpr int MakeMask(const int index, const int length, T (&values)[N]) {
  return length > 1
             ? (MakeMask(index, 1, values) +
                MakeMask(index + 1, length - 1, values))
             : (static_cast<uint8_t>(values[index]) < 64
                    ? (uint64_t{1} << static_cast<uint8_t>(values[index]))
                    : EnumMustBeLessThan64());
}

// Create a bitmask where each bit corresponds to one potential enum value.
// |values| should be an array listing all possible enum values. The bit is set
// to one if the corresponding enum exists. Only works for enums with values
// less than 64.
template <typename T, size_t N>
constexpr uint64_t CreateEnumBitmask(T (&values)[N]) {
  return MakeMask(0, N, values);
}

bool SetChromaSitingFromUint8(uint8_t enum_value,
                              ColorSpace::ChromaSiting* chroma_siting) {
  constexpr ColorSpace::ChromaSiting kChromaSitings[] = {
      ColorSpace::ChromaSiting::kUnspecified,
      ColorSpace::ChromaSiting::kCollocated, ColorSpace::ChromaSiting::kHalf};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kChromaSitings);

  return SetFromUint8(enum_value, enum_bitmask, chroma_siting);
}

}  // namespace

ColorSpace::ColorSpace() = default;
ColorSpace::ColorSpace(const ColorSpace& other) = default;
ColorSpace::ColorSpace(ColorSpace&& other) = default;
ColorSpace& ColorSpace::operator=(const ColorSpace& other) = default;

ColorSpace::ColorSpace(PrimaryID primaries,
                       TransferID transfer,
                       MatrixID matrix,
                       RangeID range)
    : ColorSpace(primaries,
                 transfer,
                 matrix,
                 range,
                 ChromaSiting::kUnspecified,
                 ChromaSiting::kUnspecified,
                 nullptr) {}

ColorSpace::ColorSpace(PrimaryID primaries,
                       TransferID transfer,
                       MatrixID matrix,
                       RangeID range,
                       ChromaSiting chroma_siting_horz,
                       ChromaSiting chroma_siting_vert,
                       const HdrMetadata* hdr_metadata)
    : primaries_(primaries),
      transfer_(transfer),
      matrix_(matrix),
      range_(range),
      chroma_siting_horizontal_(chroma_siting_horz),
      chroma_siting_vertical_(chroma_siting_vert),
      hdr_metadata_(hdr_metadata ? absl::make_optional(*hdr_metadata)
                                 : absl::nullopt) {}

ColorSpace::PrimaryID ColorSpace::primaries() const {
  return primaries_;
}

ColorSpace::TransferID ColorSpace::transfer() const {
  return transfer_;
}

ColorSpace::MatrixID ColorSpace::matrix() const {
  return matrix_;
}

ColorSpace::RangeID ColorSpace::range() const {
  return range_;
}

ColorSpace::ChromaSiting ColorSpace::chroma_siting_horizontal() const {
  return chroma_siting_horizontal_;
}

ColorSpace::ChromaSiting ColorSpace::chroma_siting_vertical() const {
  return chroma_siting_vertical_;
}

const HdrMetadata* ColorSpace::hdr_metadata() const {
  return hdr_metadata_ ? &*hdr_metadata_ : nullptr;
}

bool ColorSpace::set_primaries_from_uint8(uint8_t enum_value) {
  constexpr PrimaryID kPrimaryIds[] = {
      PrimaryID::kBT709, PrimaryID::kUnspecified, PrimaryID::kBT470M,
      PrimaryID::kBT470BG, PrimaryID::kSMPTE170M, PrimaryID::kSMPTE240M,
      PrimaryID::kFILM, PrimaryID::kBT2020, PrimaryID::kSMPTEST428,
      PrimaryID::kSMPTEST431, PrimaryID::kSMPTEST432, PrimaryID::kJEDECP22};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kPrimaryIds);

  return SetFromUint8(enum_value, enum_bitmask, &primaries_);
}

bool ColorSpace::set_transfer_from_uint8(uint8_t enum_value) {
  constexpr TransferID kTransferIds[] = {
      TransferID::kBT709, TransferID::kUnspecified,
      TransferID::kGAMMA22, TransferID::kGAMMA28,
      TransferID::kSMPTE170M, TransferID::kSMPTE240M,
      TransferID::kLINEAR, TransferID::kLOG,
      TransferID::kLOG_SQRT, TransferID::kIEC61966_2_4,
      TransferID::kBT1361_ECG, TransferID::kIEC61966_2_1,
      TransferID::kBT2020_10, TransferID::kBT2020_12,
      TransferID::kSMPTEST2084, TransferID::kSMPTEST428,
      TransferID::kARIB_STD_B67};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kTransferIds);

  return SetFromUint8(enum_value, enum_bitmask, &transfer_);
}

bool ColorSpace::set_matrix_from_uint8(uint8_t enum_value) {
  constexpr MatrixID kMatrixIds[] = {
      MatrixID::kRGB, MatrixID::kBT709, MatrixID::kUnspecified,
      MatrixID::kFCC, MatrixID::kBT470BG, MatrixID::kSMPTE170M,
      MatrixID::kSMPTE240M, MatrixID::kYCOCG, MatrixID::kBT2020_NCL,
      MatrixID::kBT2020_CL, MatrixID::kSMPTE2085, MatrixID::kCDNCLS,
      MatrixID::kCDCLS, MatrixID::kBT2100_ICTCP};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kMatrixIds);

  return SetFromUint8(enum_value, enum_bitmask, &matrix_);
}

bool ColorSpace::set_range_from_uint8(uint8_t enum_value) {
  constexpr RangeID kRangeIds[] = {RangeID::kInvalid, RangeID::kLimited,
                                   RangeID::kFull, RangeID::kDerived};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kRangeIds);

  return SetFromUint8(enum_value, enum_bitmask, &range_);
}

bool ColorSpace::set_chroma_siting_horizontal_from_uint8(uint8_t enum_value) {
  return SetChromaSitingFromUint8(enum_value, &chroma_siting_horizontal_);
}

bool ColorSpace::set_chroma_siting_vertical_from_uint8(uint8_t enum_value) {
  return SetChromaSitingFromUint8(enum_value, &chroma_siting_vertical_);
}

void ColorSpace::set_hdr_metadata(const HdrMetadata* hdr_metadata) {
  hdr_metadata_ =
      hdr_metadata ? absl::make_optional(*hdr_metadata) : absl::nullopt;
}

}  // namespace webrtc
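The two helpers above, CreateEnumBitmask and SetFromUint8, carry the whole validation scheme: every legal enum value sets one bit in a 64-bit constexpr mask, and a wire value is accepted only if its bit is set. A minimal usage sketch, assuming only the headers added in this commit (the caller and the concrete values are illustrative, not part of the upstream files):

// Hypothetical caller of the validation path above.
#include <cstdio>

#include "api/video/color_space.h"

int main() {
  webrtc::ColorSpace color_space;
  // 9 is a defined PrimaryID (kBT2020), so its bit is set and the setter
  // accepts the value.
  std::printf("9 accepted: %d\n", color_space.set_primaries_from_uint8(9));  // 1
  // 3 is not listed in kPrimaryIds (reserved in H.273), so the setter returns
  // false and leaves the stored value at kUnspecified.
  std::printf("3 accepted: %d\n", color_space.set_primaries_from_uint8(3));  // 0
  return 0;
}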
webrtc/api/video/color_space.h (new file, 178 lines)
@@ -0,0 +1,178 @@
/*
 * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef API_VIDEO_COLOR_SPACE_H_
#define API_VIDEO_COLOR_SPACE_H_

#include <stdint.h>

#include "absl/types/optional.h"
#include "api/video/hdr_metadata.h"
#include "rtc_base/system/rtc_export.h"

namespace webrtc {

// This class represents color information as specified in T-REC H.273,
// available from https://www.itu.int/rec/T-REC-H.273.
//
// WebRTC's supported codecs:
// - VP9 supports color profiles, see VP9 Bitstream & Decoding Process
// Specification Version 0.6 Section 7.2.2 "Color config semantics" available
// from https://www.webmproject.org.
// - VP8 only supports BT.601, see
// https://tools.ietf.org/html/rfc6386#section-9.2
// - H264 uses the exact same representation as T-REC H.273. See T-REC-H.264
// E.2.1, "VUI parameters semantics", available from
// https://www.itu.int/rec/T-REC-H.264.

class RTC_EXPORT ColorSpace {
 public:
  enum class PrimaryID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 2.
    kBT709 = 1,
    kUnspecified = 2,
    kBT470M = 4,
    kBT470BG = 5,
    kSMPTE170M = 6,  // Identical to BT601
    kSMPTE240M = 7,
    kFILM = 8,
    kBT2020 = 9,
    kSMPTEST428 = 10,
    kSMPTEST431 = 11,
    kSMPTEST432 = 12,
    kJEDECP22 = 22,  // Identical to EBU3213-E
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kPrimaryIds.
  };

  enum class TransferID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 3.
    kBT709 = 1,
    kUnspecified = 2,
    kGAMMA22 = 4,
    kGAMMA28 = 5,
    kSMPTE170M = 6,
    kSMPTE240M = 7,
    kLINEAR = 8,
    kLOG = 9,
    kLOG_SQRT = 10,
    kIEC61966_2_4 = 11,
    kBT1361_ECG = 12,
    kIEC61966_2_1 = 13,
    kBT2020_10 = 14,
    kBT2020_12 = 15,
    kSMPTEST2084 = 16,
    kSMPTEST428 = 17,
    kARIB_STD_B67 = 18,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kTransferIds.
  };

  enum class MatrixID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 4.
    kRGB = 0,
    kBT709 = 1,
    kUnspecified = 2,
    kFCC = 4,
    kBT470BG = 5,
    kSMPTE170M = 6,
    kSMPTE240M = 7,
    kYCOCG = 8,
    kBT2020_NCL = 9,
    kBT2020_CL = 10,
    kSMPTE2085 = 11,
    kCDNCLS = 12,
    kCDCLS = 13,
    kBT2100_ICTCP = 14,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kMatrixIds.
  };

  enum class RangeID {
    // The indices are equal to the values specified at
    // https://www.webmproject.org/docs/container/#colour for the element Range.
    kInvalid = 0,
    // Limited Rec. 709 color range with RGB values ranging from 16 to 235.
    kLimited = 1,
    // Full RGB color range with RGB values from 0 to 255.
    kFull = 2,
    // Range is defined by MatrixCoefficients/TransferCharacteristics.
    kDerived = 3,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kRangeIds.
  };

  enum class ChromaSiting {
    // Chroma siting specifies how chroma is subsampled relative to the luma
    // samples in a YUV video frame.
    // The indices are equal to the values specified at
    // https://www.webmproject.org/docs/container/#colour for the element
    // ChromaSitingVert and ChromaSitingHorz.
    kUnspecified = 0,
    kCollocated = 1,
    kHalf = 2,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kChromaSitings.
  };

  ColorSpace();
  ColorSpace(const ColorSpace& other);
  ColorSpace(ColorSpace&& other);
  ColorSpace& operator=(const ColorSpace& other);
  ColorSpace(PrimaryID primaries,
             TransferID transfer,
             MatrixID matrix,
             RangeID range);
  ColorSpace(PrimaryID primaries,
             TransferID transfer,
             MatrixID matrix,
             RangeID range,
             ChromaSiting chroma_siting_horizontal,
             ChromaSiting chroma_siting_vertical,
             const HdrMetadata* hdr_metadata);
  friend bool operator==(const ColorSpace& lhs, const ColorSpace& rhs) {
    return lhs.primaries_ == rhs.primaries_ && lhs.transfer_ == rhs.transfer_ &&
           lhs.matrix_ == rhs.matrix_ && lhs.range_ == rhs.range_ &&
           lhs.chroma_siting_horizontal_ == rhs.chroma_siting_horizontal_ &&
           lhs.chroma_siting_vertical_ == rhs.chroma_siting_vertical_ &&
           lhs.hdr_metadata_ == rhs.hdr_metadata_;
  }
  friend bool operator!=(const ColorSpace& lhs, const ColorSpace& rhs) {
    return !(lhs == rhs);
  }

  PrimaryID primaries() const;
  TransferID transfer() const;
  MatrixID matrix() const;
  RangeID range() const;
  ChromaSiting chroma_siting_horizontal() const;
  ChromaSiting chroma_siting_vertical() const;
  const HdrMetadata* hdr_metadata() const;

  bool set_primaries_from_uint8(uint8_t enum_value);
  bool set_transfer_from_uint8(uint8_t enum_value);
  bool set_matrix_from_uint8(uint8_t enum_value);
  bool set_range_from_uint8(uint8_t enum_value);
  bool set_chroma_siting_horizontal_from_uint8(uint8_t enum_value);
  bool set_chroma_siting_vertical_from_uint8(uint8_t enum_value);
  void set_hdr_metadata(const HdrMetadata* hdr_metadata);

 private:
  PrimaryID primaries_ = PrimaryID::kUnspecified;
  TransferID transfer_ = TransferID::kUnspecified;
  MatrixID matrix_ = MatrixID::kUnspecified;
  RangeID range_ = RangeID::kInvalid;
  ChromaSiting chroma_siting_horizontal_ = ChromaSiting::kUnspecified;
  ChromaSiting chroma_siting_vertical_ = ChromaSiting::kUnspecified;
  absl::optional<HdrMetadata> hdr_metadata_;
};

}  // namespace webrtc
#endif  // API_VIDEO_COLOR_SPACE_H_
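As declared above, the four-argument constructor delegates to the seven-argument one with both chroma sitings left kUnspecified and no HDR metadata. A minimal construction sketch (the helper function is hypothetical):

#include "api/video/color_space.h"

// Hypothetical helper: describes full-range BT.709 content. Chroma siting is
// left unspecified by the delegating constructor and hdr_metadata() returns
// nullptr.
webrtc::ColorSpace MakeBt709FullRange() {
  return webrtc::ColorSpace(webrtc::ColorSpace::PrimaryID::kBT709,
                            webrtc::ColorSpace::TransferID::kBT709,
                            webrtc::ColorSpace::MatrixID::kBT709,
                            webrtc::ColorSpace::RangeID::kFull);
}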
webrtc/api/video/hdr_metadata.cc (new file, 21 lines)
@@ -0,0 +1,21 @@
/*
 * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "api/video/hdr_metadata.h"

namespace webrtc {

HdrMasteringMetadata::Chromaticity::Chromaticity() = default;

HdrMasteringMetadata::HdrMasteringMetadata() = default;

HdrMetadata::HdrMetadata() = default;

}  // namespace webrtc
webrtc/api/video/hdr_metadata.h (new file, 105 lines)
@@ -0,0 +1,105 @@
/*
 * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef API_VIDEO_HDR_METADATA_H_
#define API_VIDEO_HDR_METADATA_H_

namespace webrtc {

// SMPTE ST 2086 mastering metadata,
// see https://ieeexplore.ieee.org/document/8353899.
struct HdrMasteringMetadata {
  struct Chromaticity {
    Chromaticity();

    bool operator==(const Chromaticity& rhs) const {
      return x == rhs.x && y == rhs.y;
    }

    bool Validate() const {
      return x >= 0.0 && x <= 1.0 && y >= 0.0 && y <= 1.0;
    }

    // xy chromaticity coordinates must be calculated as specified in ISO
    // 11664-3:2012 Section 7, and must be specified with four decimal places.
    // The x coordinate should be in the range [0.0001, 0.7400] and the y
    // coordinate should be in the range [0.0001, 0.8400]. Valid range [0.0000,
    // 1.0000].
    float x = 0.0f;
    float y = 0.0f;
  };

  HdrMasteringMetadata();

  bool operator==(const HdrMasteringMetadata& rhs) const {
    return ((primary_r == rhs.primary_r) && (primary_g == rhs.primary_g) &&
            (primary_b == rhs.primary_b) && (white_point == rhs.white_point) &&
            (luminance_max == rhs.luminance_max) &&
            (luminance_min == rhs.luminance_min));
  }

  bool Validate() const {
    return luminance_max >= 0.0 && luminance_max <= 20000.0 &&
           luminance_min >= 0.0 && luminance_min <= 5.0 &&
           primary_r.Validate() && primary_g.Validate() &&
           primary_b.Validate() && white_point.Validate();
  }

  // The nominal primaries of the mastering display.
  Chromaticity primary_r;
  Chromaticity primary_g;
  Chromaticity primary_b;

  // The nominal chromaticity of the white point of the mastering display.
  Chromaticity white_point;

  // The nominal maximum display luminance of the mastering display. Specified
  // in the unit candela/m2. The value should be in the range [5, 10000] with
  // zero decimal places. Valid range [0, 20000].
  float luminance_max = 0.0f;

  // The nominal minimum display luminance of the mastering display. Specified
  // in the unit candela/m2. The value should be in the range [0.0001, 5.0000]
  // with four decimal places. Valid range [0.0000, 5.0000].
  float luminance_min = 0.0f;
};

// High dynamic range (HDR) metadata common for HDR10 and WebM/VP9-based HDR
// formats. This struct replicates the HDRMetadata struct defined in
// https://cs.chromium.org/chromium/src/media/base/hdr_metadata.h
struct HdrMetadata {
  HdrMetadata();

  bool operator==(const HdrMetadata& rhs) const {
    return (
        (max_content_light_level == rhs.max_content_light_level) &&
        (max_frame_average_light_level == rhs.max_frame_average_light_level) &&
        (mastering_metadata == rhs.mastering_metadata));
  }

  bool Validate() const {
    return max_content_light_level >= 0 && max_content_light_level <= 20000 &&
           max_frame_average_light_level >= 0 &&
           max_frame_average_light_level <= 20000 &&
           mastering_metadata.Validate();
  }

  HdrMasteringMetadata mastering_metadata;
  // Max content light level (CLL), i.e. maximum brightness level present in the
  // stream, in nits. 1 nit = 1 candela/m2. Valid range [0, 20000].
  int max_content_light_level = 0;
  // Max frame-average light level (FALL), i.e. maximum average brightness of
  // the brightest frame in the stream, in nits. Valid range [0, 20000].
  int max_frame_average_light_level = 0;
};

}  // namespace webrtc

#endif  // API_VIDEO_HDR_METADATA_H_
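A minimal sketch of filling in the structs above for an HDR10-style stream and attaching them to a ColorSpace. The helper and the numeric values (BT.2020 primaries, D65 white point, the luminance and light-level figures) are illustrative, but they stay inside the documented valid ranges, so Validate() holds for them:

#include "api/video/color_space.h"
#include "api/video/hdr_metadata.h"

// Hypothetical helper building HDR10-style signaling.
webrtc::ColorSpace MakeHdr10ColorSpace() {
  webrtc::HdrMetadata hdr;
  // Illustrative mastering display: BT.2020 primaries, D65 white point.
  hdr.mastering_metadata.primary_r.x = 0.708f;
  hdr.mastering_metadata.primary_r.y = 0.292f;
  hdr.mastering_metadata.primary_g.x = 0.170f;
  hdr.mastering_metadata.primary_g.y = 0.797f;
  hdr.mastering_metadata.primary_b.x = 0.131f;
  hdr.mastering_metadata.primary_b.y = 0.046f;
  hdr.mastering_metadata.white_point.x = 0.3127f;
  hdr.mastering_metadata.white_point.y = 0.3290f;
  hdr.mastering_metadata.luminance_max = 1000.0f;  // cd/m2
  hdr.mastering_metadata.luminance_min = 0.0001f;  // cd/m2
  hdr.max_content_light_level = 1000;              // nits
  hdr.max_frame_average_light_level = 400;         // nits
  // hdr.Validate() returns true for these values; the constructor copies the
  // metadata into the ColorSpace's optional member.
  return webrtc::ColorSpace(webrtc::ColorSpace::PrimaryID::kBT2020,
                            webrtc::ColorSpace::TransferID::kSMPTEST2084,
                            webrtc::ColorSpace::MatrixID::kBT2020_NCL,
                            webrtc::ColorSpace::RangeID::kLimited,
                            webrtc::ColorSpace::ChromaSiting::kUnspecified,
                            webrtc::ColorSpace::ChromaSiting::kUnspecified,
                            &hdr);
}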
webrtc/api/video/video_content_type.cc (new file, 93 lines)
@@ -0,0 +1,93 @@
/*
 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "api/video/video_content_type.h"

// VideoContentType stored as a single byte, which is sent over the network.
// Structure:
//
// 0 1 2 3 4 5 6 7
// +---------------+
// |r r e e e s s c|
//
// where:
// r - reserved bits.
// e - 3-bit number of an experiment group counted from 1. 0 means there's no
// experiment ongoing.
// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that
// no simulcast information is set.
// c - content type. 0 means real-time video, 1 means screenshare.
//

namespace webrtc {
namespace videocontenttypehelpers {

namespace {
static constexpr uint8_t kScreenshareBitsSize = 1;
static constexpr uint8_t kScreenshareBitsMask =
    (1u << kScreenshareBitsSize) - 1;

static constexpr uint8_t kSimulcastShift = 1;
static constexpr uint8_t kSimulcastBitsSize = 2;
static constexpr uint8_t kSimulcastBitsMask = ((1u << kSimulcastBitsSize) - 1)
                                              << kSimulcastShift;  // 0b00000110

static constexpr uint8_t kExperimentShift = 3;
static constexpr uint8_t kExperimentBitsSize = 3;
static constexpr uint8_t kExperimentBitsMask =
    ((1u << kExperimentBitsSize) - 1) << kExperimentShift;  // 0b00111000

static constexpr uint8_t kTotalBitsSize =
    kScreenshareBitsSize + kSimulcastBitsSize + kExperimentBitsSize;
}  // namespace

bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id) {
  // Store in bits 2-4.
  if (experiment_id >= (1 << kExperimentBitsSize))
    return false;
  *content_type = static_cast<VideoContentType>(
      (static_cast<uint8_t>(*content_type) & ~kExperimentBitsMask) |
      ((experiment_id << kExperimentShift) & kExperimentBitsMask));
  return true;
}

bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) {
  // Store in bits 5-6.
  if (simulcast_id >= (1 << kSimulcastBitsSize))
    return false;
  *content_type = static_cast<VideoContentType>(
      (static_cast<uint8_t>(*content_type) & ~kSimulcastBitsMask) |
      ((simulcast_id << kSimulcastShift) & kSimulcastBitsMask));
  return true;
}

uint8_t GetExperimentId(const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >>
         kExperimentShift;
}
uint8_t GetSimulcastId(const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >>
         kSimulcastShift;
}

bool IsScreenshare(const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
}

bool IsValidContentType(uint8_t value) {
  // Any 6-bit value is allowed.
  return value < (1 << kTotalBitsSize);
}

const char* ToString(const VideoContentType& content_type) {
  return IsScreenshare(content_type) ? "screen" : "realtime";
}
}  // namespace videocontenttypehelpers
}  // namespace webrtc
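A worked sketch of the packing above (the caller is hypothetical): the screenshare flag occupies the least-significant bit of the byte, the simulcast id the next two bits (kSimulcastShift = 1), and the experiment id the three bits above that (kExperimentShift = 3), which matches the "r r e e e s s c" diagram when the byte is read MSB-first.

#include "api/video/video_content_type.h"

// Hypothetical caller showing how the helpers pack one byte.
void PackExample() {
  webrtc::VideoContentType type = webrtc::VideoContentType::SCREENSHARE;  // 0b00000001
  webrtc::videocontenttypehelpers::SetSimulcastId(&type, 2);   // byte is now 0b00000101
  webrtc::videocontenttypehelpers::SetExperimentId(&type, 3);  // byte is now 0b00011101
  // GetSimulcastId(type) == 2, GetExperimentId(type) == 3 and
  // IsScreenshare(type) is true; only the lower 6 bits are used, so
  // IsValidContentType(0b00011101) also holds.
  static_cast<void>(type);
}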
webrtc/api/video/video_content_type.h (new file, 39 lines)
@@ -0,0 +1,39 @@
/*
 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef API_VIDEO_VIDEO_CONTENT_TYPE_H_
#define API_VIDEO_VIDEO_CONTENT_TYPE_H_

#include <stdint.h>

namespace webrtc {

enum class VideoContentType : uint8_t {
  UNSPECIFIED = 0,
  SCREENSHARE = 1,
};

namespace videocontenttypehelpers {
bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id);
bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id);

uint8_t GetExperimentId(const VideoContentType& content_type);
uint8_t GetSimulcastId(const VideoContentType& content_type);

bool IsScreenshare(const VideoContentType& content_type);

bool IsValidContentType(uint8_t value);

const char* ToString(const VideoContentType& content_type);
}  // namespace videocontenttypehelpers

}  // namespace webrtc

#endif  // API_VIDEO_VIDEO_CONTENT_TYPE_H_
webrtc/api/video/video_rotation.h (new file, 26 lines)
@@ -0,0 +1,26 @@
/*
 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef API_VIDEO_VIDEO_ROTATION_H_
#define API_VIDEO_VIDEO_ROTATION_H_

namespace webrtc {

// enum for clockwise rotation.
enum VideoRotation {
  kVideoRotation_0 = 0,
  kVideoRotation_90 = 90,
  kVideoRotation_180 = 180,
  kVideoRotation_270 = 270
};

}  // namespace webrtc

#endif  // API_VIDEO_VIDEO_ROTATION_H_
webrtc/api/video/video_timing.cc (new file, 92 lines)
@@ -0,0 +1,92 @@
/*
 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "api/video/video_timing.h"

#include "api/array_view.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/strings/string_builder.h"

namespace webrtc {

uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) {
  if (time_ms < base_ms) {
    RTC_DLOG(LS_ERROR) << "Delta " << (time_ms - base_ms)
                       << "ms expected to be positive";
  }
  return rtc::saturated_cast<uint16_t>(time_ms - base_ms);
}

TimingFrameInfo::TimingFrameInfo()
    : rtp_timestamp(0),
      capture_time_ms(-1),
      encode_start_ms(-1),
      encode_finish_ms(-1),
      packetization_finish_ms(-1),
      pacer_exit_ms(-1),
      network_timestamp_ms(-1),
      network2_timestamp_ms(-1),
      receive_start_ms(-1),
      receive_finish_ms(-1),
      decode_start_ms(-1),
      decode_finish_ms(-1),
      render_time_ms(-1),
      flags(VideoSendTiming::kNotTriggered) {}

int64_t TimingFrameInfo::EndToEndDelay() const {
  return capture_time_ms >= 0 ? decode_finish_ms - capture_time_ms : -1;
}

bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const {
  int64_t other_delay = other.EndToEndDelay();
  return other_delay == -1 || EndToEndDelay() > other_delay;
}

bool TimingFrameInfo::operator<(const TimingFrameInfo& other) const {
  return other.IsLongerThan(*this);
}

bool TimingFrameInfo::operator<=(const TimingFrameInfo& other) const {
  return !IsLongerThan(other);
}

bool TimingFrameInfo::IsOutlier() const {
  return !IsInvalid() && (flags & VideoSendTiming::kTriggeredBySize);
}

bool TimingFrameInfo::IsTimerTriggered() const {
  return !IsInvalid() && (flags & VideoSendTiming::kTriggeredByTimer);
}

bool TimingFrameInfo::IsInvalid() const {
  return flags == VideoSendTiming::kInvalid;
}

std::string TimingFrameInfo::ToString() const {
  if (IsInvalid()) {
    return "";
  }

  char buf[1024];
  rtc::SimpleStringBuilder sb(buf);

  sb << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms << ','
     << encode_finish_ms << ',' << packetization_finish_ms << ','
     << pacer_exit_ms << ',' << network_timestamp_ms << ','
     << network2_timestamp_ms << ',' << receive_start_ms << ','
     << receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms
     << ',' << render_time_ms << ',' << IsOutlier() << ','
     << IsTimerTriggered();

  return sb.str();
}

}  // namespace webrtc
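A minimal sketch (hypothetical caller and timestamps) of GetDeltaCappedMs above: deltas are measured against a base capture timestamp and saturate at 65535 so they fit the 16-bit fields of the video-timing header extension.

#include <cstdint>

#include "api/video/video_timing.h"

void DeltaExample() {
  const int64_t capture_ms = 1000000;  // illustrative capture timestamp
  const uint16_t small_delta =
      webrtc::VideoSendTiming::GetDeltaCappedMs(capture_ms, capture_ms + 250);
  // small_delta == 250.
  const uint16_t capped_delta =
      webrtc::VideoSendTiming::GetDeltaCappedMs(capture_ms, capture_ms + 100000);
  // 100000 does not fit in 16 bits, so saturated_cast clamps it to 65535.
  static_cast<void>(small_delta);
  static_cast<void>(capped_delta);
}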
webrtc/api/video/video_timing.h (new file, 129 lines)
@@ -0,0 +1,129 @@
/*
 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef API_VIDEO_VIDEO_TIMING_H_
#define API_VIDEO_VIDEO_TIMING_H_

#include <stdint.h>

#include <limits>
#include <string>

namespace webrtc {

// Video timing timestamps in ms counted from capture_time_ms of a frame.
// This structure represents data sent in video-timing RTP header extension.
struct VideoSendTiming {
  enum TimingFrameFlags : uint8_t {
    kNotTriggered = 0,  // Timing info valid, but not to be transmitted.
                        // Used on send-side only.
    kTriggeredByTimer = 1 << 0,  // Frame marked for tracing by periodic timer.
    kTriggeredBySize = 1 << 1,   // Frame marked for tracing due to size.
    kInvalid = std::numeric_limits<uint8_t>::max()  // Invalid, ignore!
  };

  // Returns |time_ms - base_ms| capped at the max 16-bit value.
  // Used to fill this data structure; the
  // https://webrtc.org/experiments/rtp-hdrext/video-timing/ extension stores
  // 16-bit deltas of timestamps from packet capture time.
  static uint16_t GetDeltaCappedMs(int64_t base_ms, int64_t time_ms);

  uint16_t encode_start_delta_ms;
  uint16_t encode_finish_delta_ms;
  uint16_t packetization_finish_delta_ms;
  uint16_t pacer_exit_delta_ms;
  uint16_t network_timestamp_delta_ms;
  uint16_t network2_timestamp_delta_ms;
  uint8_t flags;
};

// Used to report precise timings of 'timing frames'. Contains all important
// timestamps for a lifetime of that specific frame. Reported as a string via
// GetStats(). Only the frame which took the longest between two GetStats calls
// is reported.
struct TimingFrameInfo {
  TimingFrameInfo();

  // Returns end-to-end delay of a frame, if sender and receiver timestamps are
  // synchronized, -1 otherwise.
  int64_t EndToEndDelay() const;

  // Returns true if current frame took longer to process than |other| frame.
  // If other frame's clocks are not synchronized, current frame is always
  // preferred.
  bool IsLongerThan(const TimingFrameInfo& other) const;

  // Returns true if flags are set to indicate this frame was marked for tracing
  // due to the size being outside some limit.
  bool IsOutlier() const;

  // Returns true if flags are set to indicate this frame was marked for tracing
  // due to cyclic timer.
  bool IsTimerTriggered() const;

  // Returns true if the timing data is marked as invalid, in which case it
  // should be ignored.
  bool IsInvalid() const;

  std::string ToString() const;

  bool operator<(const TimingFrameInfo& other) const;

  bool operator<=(const TimingFrameInfo& other) const;

  uint32_t rtp_timestamp;  // Identifier of a frame.
  // All timestamps below are in local monotonic clock of a receiver.
  // If sender clock is not yet estimated, sender timestamps
  // (capture_time_ms ... pacer_exit_ms) are negative values, still
  // relatively correct.
  int64_t capture_time_ms;          // Capture time of a frame.
  int64_t encode_start_ms;          // Encode start time.
  int64_t encode_finish_ms;         // Encode completion time.
  int64_t packetization_finish_ms;  // Time when frame was passed to pacer.
  int64_t pacer_exit_ms;  // Time when last packet was pushed out of pacer.
  // Two in-network RTP processor timestamps: meaning is application specific.
  int64_t network_timestamp_ms;
  int64_t network2_timestamp_ms;
  int64_t receive_start_ms;   // First received packet time.
  int64_t receive_finish_ms;  // Last received packet time.
  int64_t decode_start_ms;    // Decode start time.
  int64_t decode_finish_ms;   // Decode completion time.
  int64_t render_time_ms;     // Proposed render time to ensure smooth playback.

  uint8_t flags;  // Flags indicating validity and/or why tracing was triggered.
};

// Minimum and maximum playout delay values from capture to render.
// These are best effort values.
//
// A value < 0 indicates no change from previous valid value.
//
// min = max = 0 indicates that the receiver should try and render
// frame as soon as possible.
//
// min = x, max = y indicates that the receiver is free to adapt
// in the range (x, y) based on network jitter.
struct VideoPlayoutDelay {
  VideoPlayoutDelay() = default;
  VideoPlayoutDelay(int min_ms, int max_ms) : min_ms(min_ms), max_ms(max_ms) {}
  int min_ms = -1;
  int max_ms = -1;

  bool operator==(const VideoPlayoutDelay& rhs) const {
    return min_ms == rhs.min_ms && max_ms == rhs.max_ms;
  }
};

// TODO(bugs.webrtc.org/7660): Old name, delete after downstream use is updated.
using PlayoutDelay = VideoPlayoutDelay;

}  // namespace webrtc

#endif  // API_VIDEO_VIDEO_TIMING_H_
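For the comparison operators declared above, a minimal sketch (the helper is hypothetical) of how a stats collector could keep the "longest" timing frame between two GetStats() calls; a < b holds when b took longer end to end, and a frame whose clocks are not yet synchronized (EndToEndDelay() == -1) loses the comparison.

#include "api/video/video_timing.h"

// Hypothetical helper for a stats collector: keep whichever frame took longer.
webrtc::TimingFrameInfo LongerOf(const webrtc::TimingFrameInfo& a,
                                 const webrtc::TimingFrameInfo& b) {
  // operator< delegates to IsLongerThan(), which compares end-to-end delays
  // and treats a frame with unsynchronized clocks (EndToEndDelay() == -1) as
  // the shorter one.
  return a < b ? b : a;
}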