Bump to WebRTC M120 release
Some API deprecations -- ExperimentalAgc and ExperimentalNs are gone. We're continuing to carry iSAC even though it's gone upstream, but we may want to drop it soon.
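For downstream code that still enabled those modules through the removed webrtc::Config hooks (config.Set<webrtc::ExperimentalAgc>(...) and friends), the equivalent switches now live on AudioProcessing::Config. A rough migration sketch -- the field names below are my reading of the M120 headers, not something this commit adds, so verify them against modules/audio_processing/include/audio_processing.h:

#include "modules/audio_processing/include/audio_processing.h"

// Assumed mapping: the old ExperimentalAgc toggle roughly corresponds to the
// analog gain controller settings in AudioProcessing::Config; whatever relied
// on ExperimentalNs needs to be rechecked against the M120 config options.
rtc::scoped_refptr<webrtc::AudioProcessing> apm =
    webrtc::AudioProcessingBuilder().Create();
webrtc::AudioProcessing::Config cfg = apm->GetConfig();
cfg.gain_controller1.enabled = true;
cfg.gain_controller1.analog_gain_controller.enabled = true;  // was ExperimentalAgc
apm->ApplyConfig(cfg);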
@@ -10,9 +10,11 @@
#include "api/video/color_space.h"

#include "rtc_base/strings/string_builder.h"

namespace webrtc {
namespace {
// Try to convert |enum_value| into the enum class T. |enum_bitmask| is created
// Try to convert `enum_value` into the enum class T. `enum_bitmask` is created
// by the funciton below. Returns true if conversion was successful, false
// otherwise.
template <typename T>
@@ -43,7 +45,7 @@ constexpr int MakeMask(const int index, const int length, T (&values)[N]) {
}

// Create a bitmask where each bit corresponds to one potential enum value.
// |values| should be an array listing all possible enum values. The bit is set
// `values` should be an array listing all possible enum values. The bit is set
// to one if the corresponding enum exists. Only works for enums with values
// less than 64.
template <typename T, size_t N>
@@ -124,6 +126,80 @@ const HdrMetadata* ColorSpace::hdr_metadata() const {
  return hdr_metadata_ ? &*hdr_metadata_ : nullptr;
}

#define PRINT_ENUM_CASE(TYPE, NAME) \
  case TYPE::NAME:                  \
    ss << #NAME;                    \
    break;

std::string ColorSpace::AsString() const {
  char buf[1024];
  rtc::SimpleStringBuilder ss(buf);
  ss << "{primaries:";
  switch (primaries_) {
    PRINT_ENUM_CASE(PrimaryID, kBT709)
    PRINT_ENUM_CASE(PrimaryID, kUnspecified)
    PRINT_ENUM_CASE(PrimaryID, kBT470M)
    PRINT_ENUM_CASE(PrimaryID, kBT470BG)
    PRINT_ENUM_CASE(PrimaryID, kSMPTE170M)
    PRINT_ENUM_CASE(PrimaryID, kSMPTE240M)
    PRINT_ENUM_CASE(PrimaryID, kFILM)
    PRINT_ENUM_CASE(PrimaryID, kBT2020)
    PRINT_ENUM_CASE(PrimaryID, kSMPTEST428)
    PRINT_ENUM_CASE(PrimaryID, kSMPTEST431)
    PRINT_ENUM_CASE(PrimaryID, kSMPTEST432)
    PRINT_ENUM_CASE(PrimaryID, kJEDECP22)
  }
  ss << ", transfer:";
  switch (transfer_) {
    PRINT_ENUM_CASE(TransferID, kBT709)
    PRINT_ENUM_CASE(TransferID, kUnspecified)
    PRINT_ENUM_CASE(TransferID, kGAMMA22)
    PRINT_ENUM_CASE(TransferID, kGAMMA28)
    PRINT_ENUM_CASE(TransferID, kSMPTE170M)
    PRINT_ENUM_CASE(TransferID, kSMPTE240M)
    PRINT_ENUM_CASE(TransferID, kLINEAR)
    PRINT_ENUM_CASE(TransferID, kLOG)
    PRINT_ENUM_CASE(TransferID, kLOG_SQRT)
    PRINT_ENUM_CASE(TransferID, kIEC61966_2_4)
    PRINT_ENUM_CASE(TransferID, kBT1361_ECG)
    PRINT_ENUM_CASE(TransferID, kIEC61966_2_1)
    PRINT_ENUM_CASE(TransferID, kBT2020_10)
    PRINT_ENUM_CASE(TransferID, kBT2020_12)
    PRINT_ENUM_CASE(TransferID, kSMPTEST2084)
    PRINT_ENUM_CASE(TransferID, kSMPTEST428)
    PRINT_ENUM_CASE(TransferID, kARIB_STD_B67)
  }
  ss << ", matrix:";
  switch (matrix_) {
    PRINT_ENUM_CASE(MatrixID, kRGB)
    PRINT_ENUM_CASE(MatrixID, kBT709)
    PRINT_ENUM_CASE(MatrixID, kUnspecified)
    PRINT_ENUM_CASE(MatrixID, kFCC)
    PRINT_ENUM_CASE(MatrixID, kBT470BG)
    PRINT_ENUM_CASE(MatrixID, kSMPTE170M)
    PRINT_ENUM_CASE(MatrixID, kSMPTE240M)
    PRINT_ENUM_CASE(MatrixID, kYCOCG)
    PRINT_ENUM_CASE(MatrixID, kBT2020_NCL)
    PRINT_ENUM_CASE(MatrixID, kBT2020_CL)
    PRINT_ENUM_CASE(MatrixID, kSMPTE2085)
    PRINT_ENUM_CASE(MatrixID, kCDNCLS)
    PRINT_ENUM_CASE(MatrixID, kCDCLS)
    PRINT_ENUM_CASE(MatrixID, kBT2100_ICTCP)
  }

  ss << ", range:";
  switch (range_) {
    PRINT_ENUM_CASE(RangeID, kInvalid)
    PRINT_ENUM_CASE(RangeID, kLimited)
    PRINT_ENUM_CASE(RangeID, kFull)
    PRINT_ENUM_CASE(RangeID, kDerived)
  }
  ss << "}";
  return ss.str();
}

#undef PRINT_ENUM_CASE

bool ColorSpace::set_primaries_from_uint8(uint8_t enum_value) {
  constexpr PrimaryID kPrimaryIds[] = {
      PrimaryID::kBT709, PrimaryID::kUnspecified, PrimaryID::kBT470M,
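The AsString() helper added above is mainly useful for logging. A tiny usage sketch, assuming the existing (PrimaryID, TransferID, MatrixID, RangeID) constructor from api/video/color_space.h:

// Hypothetical logging call; the output format follows the code above.
webrtc::ColorSpace cs(webrtc::ColorSpace::PrimaryID::kBT709,
                      webrtc::ColorSpace::TransferID::kBT709,
                      webrtc::ColorSpace::MatrixID::kBT709,
                      webrtc::ColorSpace::RangeID::kLimited);
RTC_LOG(LS_INFO) << cs.AsString();
// -> "{primaries:kBT709, transfer:kBT709, matrix:kBT709, range:kLimited}"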
@@ -13,6 +13,8 @@
#include <stdint.h>

#include <string>

#include "absl/types/optional.h"
#include "api/video/hdr_metadata.h"
#include "rtc_base/system/rtc_export.h"
@@ -101,7 +103,7 @@ class RTC_EXPORT ColorSpace {
    kInvalid = 0,
    // Limited Rec. 709 color range with RGB values ranging from 16 to 235.
    kLimited = 1,
    // Full RGB color range with RGB valees from 0 to 255.
    // Full RGB color range with RGB values from 0 to 255.
    kFull = 2,
    // Range is defined by MatrixCoefficients/TransferCharacteristics.
    kDerived = 3,
@@ -155,6 +157,7 @@ class RTC_EXPORT ColorSpace {
  ChromaSiting chroma_siting_horizontal() const;
  ChromaSiting chroma_siting_vertical() const;
  const HdrMetadata* hdr_metadata() const;
  std::string AsString() const;

  bool set_primaries_from_uint8(uint8_t enum_value);
  bool set_transfer_from_uint8(uint8_t enum_value);
@@ -10,21 +10,7 @@
#include "api/video/video_content_type.h"

// VideoContentType stored as a single byte, which is sent over the network.
// Structure:
//
//  0 1 2 3 4 5 6 7
// +---------------+
// |r r e e e s s c|
//
// where:
// r - reserved bits.
// e - 3-bit number of an experiment group counted from 1. 0 means there's no
//     experiment ongoing.
// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that
//     no simulcast information is set.
// c - content type. 0 means real-time video, 1 means screenshare.
//
#include "rtc_base/checks.h"

namespace webrtc {
namespace videocontenttypehelpers {
@@ -33,57 +19,21 @@ namespace {
static constexpr uint8_t kScreenshareBitsSize = 1;
static constexpr uint8_t kScreenshareBitsMask =
    (1u << kScreenshareBitsSize) - 1;

static constexpr uint8_t kSimulcastShift = 1;
static constexpr uint8_t kSimulcastBitsSize = 2;
static constexpr uint8_t kSimulcastBitsMask = ((1u << kSimulcastBitsSize) - 1)
                                              << kSimulcastShift;  // 0b00000110

static constexpr uint8_t kExperimentShift = 3;
static constexpr uint8_t kExperimentBitsSize = 3;
static constexpr uint8_t kExperimentBitsMask =
    ((1u << kExperimentBitsSize) - 1) << kExperimentShift;  // 0b00111000

static constexpr uint8_t kTotalBitsSize =
    kScreenshareBitsSize + kSimulcastBitsSize + kExperimentBitsSize;
}  // namespace

bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id) {
  // Store in bits 2-4.
  if (experiment_id >= (1 << kExperimentBitsSize))
    return false;
  *content_type = static_cast<VideoContentType>(
      (static_cast<uint8_t>(*content_type) & ~kExperimentBitsMask) |
      ((experiment_id << kExperimentShift) & kExperimentBitsMask));
  return true;
}

bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) {
  // Store in bits 5-6.
  if (simulcast_id >= (1 << kSimulcastBitsSize))
    return false;
  *content_type = static_cast<VideoContentType>(
      (static_cast<uint8_t>(*content_type) & ~kSimulcastBitsMask) |
      ((simulcast_id << kSimulcastShift) & kSimulcastBitsMask));
  return true;
}

uint8_t GetExperimentId(const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >>
         kExperimentShift;
}
uint8_t GetSimulcastId(const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >>
         kSimulcastShift;
}

bool IsScreenshare(const VideoContentType& content_type) {
  // Ensure no bits apart from the screenshare bit is set.
  // This CHECK is a temporary measure to detect code that introduces
  // values according to old versions.
  RTC_CHECK((static_cast<uint8_t>(content_type) & !kScreenshareBitsMask) == 0);
  return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
}

bool IsValidContentType(uint8_t value) {
  // Any 6-bit value is allowed.
  return value < (1 << kTotalBitsSize);
  // Only the screenshare bit is allowed.
  // However, due to previous usage of the next 5 bits, we allow
  // the lower 6 bits to be set.
  return value < (1 << 6);
}

const char* ToString(const VideoContentType& content_type) {
@@ -15,18 +15,15 @@
namespace webrtc {

// VideoContentType stored as a single byte, which is sent over the network
// in the rtp-hdrext/video-content-type extension.
// Only the lowest bit is used, per the enum.
enum class VideoContentType : uint8_t {
  UNSPECIFIED = 0,
  SCREENSHARE = 1,
};

namespace videocontenttypehelpers {
bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id);
bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id);

uint8_t GetExperimentId(const VideoContentType& content_type);
uint8_t GetSimulcastId(const VideoContentType& content_type);

bool IsScreenshare(const VideoContentType& content_type);

bool IsValidContentType(uint8_t value);
@@ -10,7 +10,10 @@
#include "api/video/video_timing.h"

#include <algorithm>

#include "api/array_view.h"
#include "api/units/time_delta.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/strings/string_builder.h"
@@ -25,6 +28,14 @@ uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) {
  return rtc::saturated_cast<uint16_t>(time_ms - base_ms);
}

uint16_t VideoSendTiming::GetDeltaCappedMs(TimeDelta delta) {
  if (delta < TimeDelta::Zero()) {
    RTC_DLOG(LS_ERROR) << "Delta " << delta.ms()
                       << "ms expected to be positive";
  }
  return rtc::saturated_cast<uint16_t>(delta.ms());
}

TimingFrameInfo::TimingFrameInfo()
    : rtp_timestamp(0),
      capture_time_ms(-1),
@@ -89,4 +100,23 @@ std::string TimingFrameInfo::ToString() const {
  return sb.str();
}

VideoPlayoutDelay::VideoPlayoutDelay(TimeDelta min, TimeDelta max)
    : min_(std::clamp(min, TimeDelta::Zero(), kMax)),
      max_(std::clamp(max, min_, kMax)) {
  if (!(TimeDelta::Zero() <= min && min <= max && max <= kMax)) {
    RTC_LOG(LS_ERROR) << "Invalid video playout delay: [" << min << "," << max
                      << "]. Clamped to [" << this->min() << "," << this->max()
                      << "]";
  }
}

bool VideoPlayoutDelay::Set(TimeDelta min, TimeDelta max) {
  if (TimeDelta::Zero() <= min && min <= max && max <= kMax) {
    min_ = min;
    max_ = max;
    return true;
  }
  return false;
}

}  // namespace webrtc
@@ -16,11 +16,14 @@
#include <limits>
#include <string>

#include "api/units/time_delta.h"
#include "rtc_base/system/rtc_export.h"

namespace webrtc {

// Video timing timestamps in ms counted from capture_time_ms of a frame.
// This structure represents data sent in video-timing RTP header extension.
struct VideoSendTiming {
struct RTC_EXPORT VideoSendTiming {
  enum TimingFrameFlags : uint8_t {
    kNotTriggered = 0,  // Timing info valid, but not to be transmitted.
                        // Used on send-side only.
@@ -34,6 +37,7 @@ struct VideoSendTiming {
  // https://webrtc.org/experiments/rtp-hdrext/video-timing/ extension stores
  // 16-bit deltas of timestamps from packet capture time.
  static uint16_t GetDeltaCappedMs(int64_t base_ms, int64_t time_ms);
  static uint16_t GetDeltaCappedMs(TimeDelta delta);

  uint16_t encode_start_delta_ms;
  uint16_t encode_finish_delta_ms;
@@ -41,21 +45,21 @@ struct VideoSendTiming {
  uint16_t pacer_exit_delta_ms;
  uint16_t network_timestamp_delta_ms;
  uint16_t network2_timestamp_delta_ms;
  uint8_t flags;
  uint8_t flags = TimingFrameFlags::kInvalid;
};

// Used to report precise timings of a 'timing frames'. Contains all important
// timestamps for a lifetime of that specific frame. Reported as a string via
// GetStats(). Only frame which took the longest between two GetStats calls is
// reported.
struct TimingFrameInfo {
struct RTC_EXPORT TimingFrameInfo {
  TimingFrameInfo();

  // Returns end-to-end delay of a frame, if sender and receiver timestamps are
  // synchronized, -1 otherwise.
  int64_t EndToEndDelay() const;

  // Returns true if current frame took longer to process than |other| frame.
  // Returns true if current frame took longer to process than `other` frame.
  // If other frame's clocks are not synchronized, current frame is always
  // preferred.
  bool IsLongerThan(const TimingFrameInfo& other) const;
@@ -103,26 +107,43 @@ struct TimingFrameInfo {
// Minimum and maximum playout delay values from capture to render.
// These are best effort values.
//
// A value < 0 indicates no change from previous valid value.
//
// min = max = 0 indicates that the receiver should try and render
// frame as soon as possible.
//
// min = x, max = y indicates that the receiver is free to adapt
// in the range (x, y) based on network jitter.
struct VideoPlayoutDelay {
  VideoPlayoutDelay() = default;
  VideoPlayoutDelay(int min_ms, int max_ms) : min_ms(min_ms), max_ms(max_ms) {}
  int min_ms = -1;
  int max_ms = -1;
// This class ensures invariant 0 <= min <= max <= kMax.
class RTC_EXPORT VideoPlayoutDelay {
 public:
  // Maximum supported value for the delay limit.
  static constexpr TimeDelta kMax = TimeDelta::Millis(10) * 0xFFF;

  bool operator==(const VideoPlayoutDelay& rhs) const {
    return min_ms == rhs.min_ms && max_ms == rhs.max_ms;
  // Creates delay limits that indicates receiver should try to render frame
  // as soon as possible.
  static VideoPlayoutDelay Minimal() {
    return VideoPlayoutDelay(TimeDelta::Zero(), TimeDelta::Zero());
  }
};

// TODO(bugs.webrtc.org/7660): Old name, delete after downstream use is updated.
using PlayoutDelay = VideoPlayoutDelay;
  // Creates valid, but unspecified limits.
  VideoPlayoutDelay() = default;
  VideoPlayoutDelay(const VideoPlayoutDelay&) = default;
  VideoPlayoutDelay& operator=(const VideoPlayoutDelay&) = default;
  VideoPlayoutDelay(TimeDelta min, TimeDelta max);

  bool Set(TimeDelta min, TimeDelta max);

  TimeDelta min() const { return min_; }
  TimeDelta max() const { return max_; }

  friend bool operator==(const VideoPlayoutDelay& lhs,
                         const VideoPlayoutDelay& rhs) {
    return lhs.min_ == rhs.min_ && lhs.max_ == rhs.max_;
  }

 private:
  TimeDelta min_ = TimeDelta::Zero();
  TimeDelta max_ = kMax;
};

} // namespace webrtc
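Since VideoPlayoutDelay changes from a plain struct with int milliseconds into a TimeDelta-based class that enforces 0 <= min <= max <= kMax, here is a short sketch of how the new API behaves, based only on the header and implementation above:

// The constructor clamps out-of-range input (and logs an error), while Set()
// rejects it and leaves the previous limits untouched.
webrtc::VideoPlayoutDelay delay(webrtc::TimeDelta::Millis(-5),
                                webrtc::TimeDelta::Millis(100));
// delay.min() == TimeDelta::Zero(), delay.max() == TimeDelta::Millis(100)

bool ok = delay.Set(webrtc::TimeDelta::Millis(200), webrtc::TimeDelta::Millis(100));
// ok == false; delay still holds [0 ms, 100 ms]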