Update to current webrtc library

This is from the upstream library commit id
3326535126e435f1ba647885ce43a8f0f3d317eb, corresponding to Chromium
88.0.4290.1.
This commit is contained in:
Arun Raghavan 2020-10-12 18:08:02 -04:00
parent b1b02581d3
commit bcec8b0b21
859 changed files with 76187 additions and 49580 deletions

View File

@ -2,7 +2,7 @@ About
===== =====
This is meant to be a more Linux packaging friendly copy of the AudioProcessing This is meant to be a more Linux packaging friendly copy of the AudioProcessing
module from the WebRTC[1][2] project. The ideal case is that we make no changes to module from the WebRTC[1] project. The ideal case is that we make no changes to
the code to make tracking upstream code easy. the code to make tracking upstream code easy.
This package currently only includes the AudioProcessing bits, but I am very This package currently only includes the AudioProcessing bits, but I am very
@ -11,18 +11,17 @@ the code and hopefully eventually have a single point of packaging all the
WebRTC code to help people reuse the code and avoid keeping private copies in WebRTC code to help people reuse the code and avoid keeping private copies in
several different projects. several different projects.
[1] http://code.google.com/p/webrtc/ [1] https://webrtc.googlesource.com/src
[2] https://chromium.googlesource.com/external/webrtc/trunk/webrtc.git
Feedback Feedback
======== ========
Patches, suggestions welcome. You can send them to the PulseAudio mailing Patches, suggestions welcome. You can send them to the PulseAudio mailing
list[3] or to me at the address below. list[2] or to me at the address below.
-- Arun Raghavan <mail@arunraghavan.net> -- Arun Raghavan <mail@arunraghavan.net>
[3] http://lists.freedesktop.org/mailman/listinfo/pulseaudio-discuss [2] http://lists.freedesktop.org/mailman/listinfo/pulseaudio-discuss
Notes Notes
==== ====

View File

@ -8,7 +8,7 @@ project source code.
webrtc git repository Chromium uses. webrtc git repository Chromium uses.
2. Instructions on checking out the Chromium tree are on the 2. Instructions on checking out the Chromium tree are on the
[Chromium site][get-chromium]. As a shortcut, you can look at the DEPS file [WebRTC repo][get-webrtc]. As a shortcut, you can look at the DEPS file
in the Chromium tree for the current webrtc version being used, and then in the Chromium tree for the current webrtc version being used, and then
just use that commit hash with the webrtc tree. just use that commit hash with the webrtc tree.
@ -61,6 +61,6 @@ project source code.
* Run some test streams through the canceller to make sure it is working * Run some test streams through the canceller to make sure it is working
fine. fine.
[get-chromium]: http://dev.chromium.org/developers/how-tos/get-the-code [get-webrtc]: https://webrtc.googlesource.com/src/
[meld]: http://meldmerge.org/ [meld]: http://meldmerge.org/
[libtool-version-info]: https://www.gnu.org/software/libtool/manual/html_node/Updating-version-info.html [libtool-version-info]: https://www.gnu.org/software/libtool/manual/html_node/Updating-version-info.html

View File

@ -1,12 +1,17 @@
project('webrtc-audio-processing', 'c', 'cpp', project('webrtc-audio-processing', 'c', 'cpp',
version : '0.3.1', version : '0.4.0',
meson_version : '>= 0.52', meson_version : '>= 0.52',
default_options : [ 'warning_level=1', default_options : [ 'warning_level=1',
'buildtype=debugoptimized' ]) 'buildtype=debugoptimized',
'c_std=c11',
'cpp_std=c++14',
]
)
soversion = 0 soversion = 0
cc = meson.get_compiler('c') cc = meson.get_compiler('c')
cpp = meson.get_compiler('cpp')
host_system = host_machine.system() host_system = host_machine.system()
@ -16,14 +21,28 @@ os_deps = []
have_posix = false have_posix = false
have_win = false have_win = false
absl_dep = [
cpp.find_library('absl_base'),
cpp.find_library('absl_bad_optional_access'),
cpp.find_library('absl_flags_internal'),
cpp.find_library('absl_flags_marshalling'),
cpp.find_library('absl_flags_parse'),
cpp.find_library('absl_flags_registry'),
cpp.find_library('absl_flags_usage_internal'),
cpp.find_library('absl_raw_logging_internal'),
cpp.find_library('absl_strings'),
cpp.find_library('absl_synchronization'),
cpp.find_library('absl_throw_delegate'),
]
if ['darwin', 'ios'].contains(host_system) if ['darwin', 'ios'].contains(host_system)
os_cflags = ['-DWEBRTC_MAC', '-DWEBRTC_THREAD_RR', '-DWEBRTC_CLOCK_TYPE_REALTIME'] os_cflags = ['-DWEBRTC_MAC']
if host_system == 'ios' if host_system == 'ios'
os_cflags += ['-DWEBRTC_IOS'] os_cflags += ['-DWEBRTC_IOS']
endif endif
have_posix = true have_posix = true
elif host_system == 'android' elif host_system == 'android'
os_cflags += ['-DWEBRTC_ANDROID', '-DWEBRTC_LINUX', '-DWEBRTC_THREAD_RR', '-DWEBRTC_CLOCK_TYPE_REALTIME'] os_cflags += ['-DWEBRTC_ANDROID', '-DWEBRTC_LINUX']
os_deps += [cc.find_library('log')] os_deps += [cc.find_library('log')]
os_deps += [dependency('gnustl', required : get_option('gnustl'))] os_deps += [dependency('gnustl', required : get_option('gnustl'))]
have_posix = true have_posix = true
@ -46,7 +65,10 @@ arch_cflags = []
have_arm = false have_arm = false
have_armv7 = false have_armv7 = false
have_neon = false have_neon = false
have_mips = false
have_mips64 = false
have_x86 = false have_x86 = false
have_avx2 = false
if ['arm', 'armv7'].contains(host_machine.cpu_family()) if ['arm', 'armv7'].contains(host_machine.cpu_family())
if cc.compiles('''#ifdef __ARM_ARCH_ISA_ARM if cc.compiles('''#ifdef __ARM_ARCH_ISA_ARM
#error no arm arch #error no arm arch
@ -67,8 +89,19 @@ if cc.compiles('''#ifndef __aarch64__
have_neon = true have_neon = true
arch_cflags += ['-DWEBRTC_ARCH_ARM64', '-DWEBRTC_HAS_NEON'] arch_cflags += ['-DWEBRTC_ARCH_ARM64', '-DWEBRTC_HAS_NEON']
endif endif
if ['mips', 'mips64'].contains(host_machine.cpu_family())
have_mips = true
arch_cflags += ['WEBRTC_ARCH_MIPS_FAMILY']
endif
if host_machine.cpu_family() == 'mips64'
have_mips64 = true
endif
if ['x86', 'x86_64'].contains(host_machine.cpu_family()) if ['x86', 'x86_64'].contains(host_machine.cpu_family())
have_x86 = true have_x86 = true
# This is unconditionally enabled for now, actual usage is determined by
# runtime CPU detection, so we're just assuming the compiler supports avx2
have_avx2 = true
arch_cflags += ['-DWEBRTC_ENABLE_AVX2']
endif endif
neon_opt = get_option('neon') neon_opt = get_option('neon')
@ -84,16 +117,13 @@ if neon_opt != 'no'
endif endif
endif endif
noise_cflags = [] common_cflags = [
if get_option('ns_mode') == 'float' '-DWEBRTC_LIBRARY_IMPL',
noise_cflags += ['-DWEBRTC_NS_FLOAT=1'] '-DWEBRTC_ENABLE_SYMBOL_EXPORT',
else '-DNDEBUG'
noise_cflags += ['-DWEBRTC_NS_FIXED=1'] ] + platform_cflags + os_cflags + arch_cflags
endif common_cxxflags = common_cflags
common_deps = os_deps + [absl_dep]
common_cflags = ['-DWEBRTC_AUDIO_PROCESSING_ONLY_BUILD', '-DNDEBUG'] + platform_cflags + os_cflags + arch_cflags + noise_cflags
common_cxxflags = ['-std=c++11'] + common_cflags
common_deps = os_deps
webrtc_inc = include_directories('.') webrtc_inc = include_directories('.')
subdir('webrtc') subdir('webrtc')
@ -107,7 +137,7 @@ pkgconfig.generate(
filebase: 'webrtc-audio-processing', filebase: 'webrtc-audio-processing',
subdirs: 'webrtc_audio_processing', subdirs: 'webrtc_audio_processing',
extra_cflags: [ extra_cflags: [
'-DWEBRTC_AUDIO_PROCESSING_ONLY_BUILD', '-DWEBRTC_LIBRARY_IMPL',
] + platform_cflags, ] + platform_cflags,
libraries: libwebrtc_audio_processing, libraries: libwebrtc_audio_processing,
) )
@ -119,7 +149,7 @@ pkgconfig.generate(
filebase: 'webrtc-audio-coding', filebase: 'webrtc-audio-coding',
subdirs: 'webrtc_audio_processing', subdirs: 'webrtc_audio_processing',
extra_cflags: [ extra_cflags: [
'-DWEBRTC_AUDIO_PROCESSING_ONLY_BUILD', '-DWEBRTC_LIBRARY_IMPL',
] + platform_cflags, ] + platform_cflags,
libraries: libwebrtc_audio_coding, libraries: libwebrtc_audio_coding,
) )

View File

@ -1,6 +1,3 @@
option('ns_mode', type: 'combo',
choices : ['float', 'fixed'],
description: 'Noise suppresion mode to use.')
option('gnustl', type: 'feature', option('gnustl', type: 'feature',
value: 'auto', value: 'auto',
description: 'Use gnustl for a c++ library implementation (only used on Android)') description: 'Use gnustl for a c++ library implementation (only used on Android)')

View File

@ -6,22 +6,146 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
# TODO(kjellander): Rebase this to webrtc/build/common.gypi changes after r6330. # This is the root build file for GN. GN will start processing by loading this
# file, and recursively load all dependencies until all dependencies are either
# resolved or known not to exist (which will cause the build to fail). So if
# you add a new build file, there must be some path of dependencies from this
# file to your new one or GN won't know about it.
import("//build/config/crypto.gni")
import("//build/config/linux/pkg_config.gni") import("//build/config/linux/pkg_config.gni")
import("build/webrtc.gni") import("//build/config/sanitizers/sanitizers.gni")
import("webrtc.gni")
if (rtc_enable_protobuf) {
import("//third_party/protobuf/proto_library.gni") import("//third_party/protobuf/proto_library.gni")
}
if (is_android) {
import("//build/config/android/config.gni")
import("//build/config/android/rules.gni")
}
if (!build_with_chromium) {
# This target should (transitively) cause everything to be built; if you run
# 'ninja default' and then 'ninja all', the second build should do no work.
group("default") {
testonly = true
deps = [ ":webrtc" ]
if (rtc_build_examples) {
deps += [ "examples" ]
}
if (rtc_build_tools) {
deps += [ "rtc_tools" ]
}
if (rtc_include_tests) {
deps += [
":rtc_unittests",
":slow_tests",
":video_engine_tests",
":voip_unittests",
":webrtc_nonparallel_tests",
":webrtc_perf_tests",
"common_audio:common_audio_unittests",
"common_video:common_video_unittests",
"examples:examples_unittests",
"media:rtc_media_unittests",
"modules:modules_tests",
"modules:modules_unittests",
"modules/audio_coding:audio_coding_tests",
"modules/audio_processing:audio_processing_tests",
"modules/remote_bitrate_estimator:rtp_to_text",
"modules/rtp_rtcp:test_packet_masks_metrics",
"modules/video_capture:video_capture_internal_impl",
"pc:peerconnection_unittests",
"pc:rtc_pc_unittests",
"rtc_tools:rtp_generator",
"rtc_tools:video_replay",
"stats:rtc_stats_unittests",
"system_wrappers:system_wrappers_unittests",
"test",
"video:screenshare_loopback",
"video:sv_loopback",
"video:video_loopback",
]
if (!is_asan) {
# Do not build :webrtc_lib_link_test because lld complains on some OS
# (e.g. when target_os = "mac") when is_asan=true. For more details,
# see bugs.webrtc.org/11027#c5.
deps += [ ":webrtc_lib_link_test" ]
}
if (is_android) {
deps += [
"examples:android_examples_junit_tests",
"sdk/android:android_instrumentation_test_apk",
"sdk/android:android_sdk_junit_tests",
]
} else {
deps += [ "modules/video_capture:video_capture_tests" ]
}
if (rtc_enable_protobuf) {
deps += [
"audio:low_bandwidth_audio_test",
"logging:rtc_event_log_rtp_dump",
"tools_webrtc/perf:webrtc_dashboard_upload",
]
}
}
}
}
# Abseil Flags by default doesn't register command line flags on mobile
# platforms, WebRTC tests requires them (e.g. on simualtors) so this
# config will be applied to testonly targets globally (see webrtc.gni).
config("absl_flags_configs") {
defines = [ "ABSL_FLAGS_STRIP_NAMES=0" ]
}
config("library_impl_config") {
# Build targets that contain WebRTC implementation need this macro to
# be defined in order to correctly export symbols when is_component_build
# is true.
# For more info see: rtc_base/build/rtc_export.h.
defines = [ "WEBRTC_LIBRARY_IMPL" ]
}
# Contains the defines and includes in common.gypi that are duplicated both as # Contains the defines and includes in common.gypi that are duplicated both as
# target_defaults and direct_dependent_settings. # target_defaults and direct_dependent_settings.
config("common_inherited_config") { config("common_inherited_config") {
defines = [] defines = []
cflags = []
ldflags = []
if (rtc_enable_symbol_export || is_component_build) {
defines = [ "WEBRTC_ENABLE_SYMBOL_EXPORT" ]
}
if (build_with_mozilla) { if (build_with_mozilla) {
defines += [ "WEBRTC_MOZILLA_BUILD" ] defines += [ "WEBRTC_MOZILLA_BUILD" ]
} }
if (!rtc_builtin_ssl_root_certificates) {
defines += [ "WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS" ]
}
if (rtc_disable_check_msg) {
defines += [ "RTC_DISABLE_CHECK_MSG" ]
}
if (rtc_enable_avx2) {
defines += [ "WEBRTC_ENABLE_AVX2" ]
}
# Some tests need to declare their own trace event handlers. If this define is
# not set, the first time TRACE_EVENT_* is called it will store the return
# value for the current handler in an static variable, so that subsequent
# changes to the handler for that TRACE_EVENT_* will be ignored.
# So when tests are included, we set this define, making it possible to use
# different event handlers in different tests.
if (rtc_include_tests) {
defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1" ]
} else {
defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=0" ]
}
if (build_with_chromium) { if (build_with_chromium) {
defines = [ "WEBRTC_CHROMIUM_BUILD" ] defines += [ "WEBRTC_CHROMIUM_BUILD" ]
include_dirs = [ include_dirs = [
# The overrides must be included first as that is the mechanism for # The overrides must be included first as that is the mechanism for
# selecting the override headers in Chromium. # selecting the override headers in Chromium.
@ -29,10 +153,18 @@ config("common_inherited_config") {
# Allow includes to be prefixed with webrtc/ in case it is not an # Allow includes to be prefixed with webrtc/ in case it is not an
# immediate subdirectory of the top-level. # immediate subdirectory of the top-level.
"..", ".",
# Just like the root WebRTC directory is added to include path, the
# corresponding directory tree with generated files needs to be added too.
# Note: this path does not change depending on the current target, e.g.
# it is always "//gen/third_party/webrtc" when building with Chromium.
# See also: http://cs.chromium.org/?q=%5C"default_include_dirs
# https://gn.googlesource.com/gn/+/master/docs/reference.md#target_gen_dir
target_gen_dir,
] ]
} }
if (is_posix) { if (is_posix || is_fuchsia) {
defines += [ "WEBRTC_POSIX" ] defines += [ "WEBRTC_POSIX" ]
} }
if (is_ios) { if (is_ios) {
@ -41,15 +173,15 @@ config("common_inherited_config") {
"WEBRTC_IOS", "WEBRTC_IOS",
] ]
} }
if (is_ios && rtc_use_objc_h264) { if (is_linux || is_chromeos) {
defines += [ "WEBRTC_OBJC_H264" ]
}
if (is_linux) {
defines += [ "WEBRTC_LINUX" ] defines += [ "WEBRTC_LINUX" ]
} }
if (is_mac) { if (is_mac) {
defines += [ "WEBRTC_MAC" ] defines += [ "WEBRTC_MAC" ]
} }
if (is_fuchsia) {
defines += [ "WEBRTC_FUCHSIA" ]
}
if (is_win) { if (is_win) {
defines += [ "WEBRTC_WIN" ] defines += [ "WEBRTC_WIN" ]
} }
@ -58,52 +190,136 @@ config("common_inherited_config") {
"WEBRTC_LINUX", "WEBRTC_LINUX",
"WEBRTC_ANDROID", "WEBRTC_ANDROID",
] ]
if (build_with_mozilla) {
defines += [ "WEBRTC_ANDROID_OPENSLES" ]
}
}
if (is_chromeos) {
defines += [ "CHROMEOS" ]
}
if (rtc_sanitize_coverage != "") {
assert(is_clang, "sanitizer coverage requires clang")
cflags += [ "-fsanitize-coverage=${rtc_sanitize_coverage}" ]
ldflags += [ "-fsanitize-coverage=${rtc_sanitize_coverage}" ]
}
if (is_ubsan) {
cflags += [ "-fsanitize=float-cast-overflow" ]
} }
} }
if (rtc_have_dbus_glib) { # TODO(bugs.webrtc.org/9693): Remove the possibility to suppress this warning
pkg_config("dbus-glib") { # as soon as WebRTC compiles without it.
packages = [ "dbus-glib-1" ] config("no_exit_time_destructors") {
if (is_clang) {
cflags = [ "-Wno-exit-time-destructors" ]
}
}
# TODO(bugs.webrtc.org/9693): Remove the possibility to suppress this warning
# as soon as WebRTC compiles without it.
config("no_global_constructors") {
if (is_clang) {
cflags = [ "-Wno-global-constructors" ]
}
}
config("rtc_prod_config") {
# Ideally, WebRTC production code (but not test code) should have these flags.
if (is_clang) {
cflags = [
"-Wexit-time-destructors",
"-Wglobal-constructors",
]
} }
} }
config("common_config") { config("common_config") {
cflags = [] cflags = []
cflags_c = []
cflags_cc = [] cflags_cc = []
if (rtc_restrict_logging) { cflags_objc = []
defines = [ "WEBRTC_RESTRICT_LOGGING" ] defines = []
if (rtc_enable_protobuf) {
defines += [ "WEBRTC_ENABLE_PROTOBUF=1" ]
} else {
defines += [ "WEBRTC_ENABLE_PROTOBUF=0" ]
} }
if (rtc_have_dbus_glib) { if (rtc_include_internal_audio_device) {
defines += [ "HAVE_DBUS_GLIB" ] defines += [ "WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE" ]
# TODO(kjellander): Investigate this, it seems like include <dbus/dbus.h>
# is still not found even if the execution of
# build/config/linux/pkg-config.py dbus-glib-1 returns correct include
# dirs on Linux.
all_dependent_configs = [ "dbus-glib" ]
} }
if (rtc_libvpx_build_vp9) {
defines += [ "RTC_ENABLE_VP9" ]
}
if (rtc_enable_sctp) {
defines += [ "HAVE_SCTP" ]
}
if (rtc_enable_external_auth) {
defines += [ "ENABLE_EXTERNAL_AUTH" ]
}
if (rtc_use_h264) {
defines += [ "WEBRTC_USE_H264" ]
}
if (rtc_use_absl_mutex) {
defines += [ "WEBRTC_ABSL_MUTEX" ]
}
if (rtc_disable_logging) {
defines += [ "RTC_DISABLE_LOGGING" ]
}
if (rtc_disable_trace_events) {
defines += [ "RTC_DISABLE_TRACE_EVENTS" ]
}
if (rtc_disable_metrics) {
defines += [ "RTC_DISABLE_METRICS" ]
}
if (rtc_exclude_transient_suppressor) {
defines += [ "WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR" ]
}
if (rtc_exclude_audio_processing_module) {
defines += [ "WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE" ]
}
cflags = []
if (build_with_chromium) { if (build_with_chromium) {
defines += [ "LOGGING_INSIDE_WEBRTC" ] defines += [
} else { # NOTICE: Since common_inherited_config is used in public_configs for our
if (is_posix) { # targets, there's no point including the defines in that config here.
# -Wextra is currently disabled in Chromium"s common.gypi. Enable # TODO(kjellander): Cleanup unused ones and move defines closer to the
# for targets that can handle it. For Android/arm64 right now # source when webrtc:4256 is completed.
# there will be an "enumeral and non-enumeral type in conditional "HAVE_WEBRTC_VIDEO",
# expression" warning in android_tools/ndk_experimental"s version "LOGGING_INSIDE_WEBRTC",
# of stlport.
# See: https://code.google.com/p/chromium/issues/detail?id=379699
if (current_cpu != "arm64" || !is_android) {
cflags = [
"-Wextra",
# We need to repeat some flags from Chromium"s common.gypi
# here that get overridden by -Wextra.
"-Wno-unused-parameter",
"-Wno-missing-field-initializers",
"-Wno-strict-overflow",
] ]
} else {
if (is_posix || is_fuchsia) {
cflags_c += [
# TODO(bugs.webrtc.org/9029): enable commented compiler flags.
# Some of these flags should also be added to cflags_objc.
# "-Wextra", (used when building C++ but not when building C)
# "-Wmissing-prototypes", (C/Obj-C only)
# "-Wmissing-declarations", (ensure this is always used C/C++, etc..)
"-Wstrict-prototypes",
# "-Wpointer-arith", (ensure this is always used C/C++, etc..)
# "-Wbad-function-cast", (C/Obj-C only)
# "-Wnested-externs", (C/Obj-C only)
]
cflags_objc += [ "-Wstrict-prototypes" ]
cflags_cc = [ cflags_cc = [
"-Wnon-virtual-dtor", "-Wnon-virtual-dtor",
@ -111,10 +327,36 @@ config("common_config") {
"-Woverloaded-virtual", "-Woverloaded-virtual",
] ]
} }
}
if (is_clang) { if (is_clang) {
cflags += [ "-Wthread-safety" ] cflags += [
"-Wc++11-narrowing",
"-Wimplicit-fallthrough",
"-Wthread-safety",
"-Winconsistent-missing-override",
"-Wundef",
]
# use_xcode_clang only refers to the iOS toolchain, host binaries use
# chromium's clang always.
if (!is_nacl &&
(!use_xcode_clang || current_toolchain == host_toolchain)) {
# Flags NaCl (Clang 3.7) and Xcode 7.3 (Clang clang-703.0.31) do not
# recognize.
cflags += [ "-Wunused-lambda-capture" ]
}
}
if (is_win && !is_clang) {
# MSVC warning suppressions (needed to use Abseil).
# TODO(bugs.webrtc.org/9274): Remove these warnings as soon as MSVC allows
# external headers warning suppression (or fix them upstream).
cflags += [ "/wd4702" ] # unreachable code
# MSVC 2019 warning suppressions for C++17 compiling
cflags +=
[ "/wd5041" ] # out-of-line definition for constexpr static data
# member is not needed and is deprecated in C++17
} }
} }
@ -129,8 +371,6 @@ config("common_config") {
defines += [ "WEBRTC_ARCH_ARM_V7" ] defines += [ "WEBRTC_ARCH_ARM_V7" ]
if (arm_use_neon) { if (arm_use_neon) {
defines += [ "WEBRTC_HAS_NEON" ] defines += [ "WEBRTC_HAS_NEON" ]
} else if (arm_optionally_use_neon) {
defines += [ "WEBRTC_DETECT_NEON" ]
} }
} }
} }
@ -163,119 +403,325 @@ config("common_config") {
"-fno-builtin-sinf", "-fno-builtin-sinf",
] ]
} }
if (use_fuzzing_engine && optimize_for_fuzzing) {
# Used in Chromium's overrides to disable logging
defines += [ "WEBRTC_UNSAFE_FUZZER_MODE" ]
} }
source_set("webrtc") { if (!build_with_chromium && rtc_win_undef_unicode) {
sources = [ cflags += [
"call.h", "/UUNICODE",
"config.h", "/U_UNICODE",
"frame_callback.h",
"transport.h",
]
defines = []
configs += [ ":common_config" ]
public_configs = [ ":common_inherited_config" ]
deps = [
"audio",
":webrtc_common",
"base:rtc_base",
"call",
"common_audio",
"common_video",
"modules/audio_coding",
"modules/audio_conference_mixer",
"modules/audio_device",
"modules/audio_processing",
"modules/bitrate_controller",
"modules/desktop_capture",
"modules/media_file",
"modules/rtp_rtcp",
"modules/utility",
"modules/video_coding",
"modules/video_processing",
"system_wrappers",
"tools",
"video",
"voice_engine",
]
if (build_with_chromium) {
deps += [
"modules/video_capture",
"modules/video_render",
] ]
} }
}
if (rtc_enable_protobuf) { config("common_objc") {
defines += [ "ENABLE_RTC_EVENT_LOG" ] frameworks = [ "Foundation.framework" ]
deps += [ ":rtc_event_log_proto" ]
if (rtc_use_metal_rendering) {
defines = [ "RTC_SUPPORTS_METAL" ]
} }
} }
if (!build_with_chromium) { if (!build_with_chromium) {
executable("webrtc_tests") { # Target to build all the WebRTC production code.
rtc_static_library("webrtc") {
# Only the root target and the test should depend on this.
visibility = [
"//:default",
"//:webrtc_lib_link_test",
]
sources = []
complete_static_lib = true
suppressed_configs += [ "//build/config/compiler:thin_archive" ]
defines = []
deps = [
"api:create_peerconnection_factory",
"api:libjingle_peerconnection_api",
"api:rtc_error",
"api:transport_api",
"api/crypto",
"api/rtc_event_log:rtc_event_log_factory",
"api/task_queue",
"api/task_queue:default_task_queue_factory",
"audio",
"call",
"common_audio",
"common_video",
"logging:rtc_event_log_api",
"media",
"modules",
"modules/video_capture:video_capture_internal_impl",
"p2p:rtc_p2p",
"pc:libjingle_peerconnection",
"pc:peerconnection",
"pc:rtc_pc",
"pc:rtc_pc_base",
"rtc_base",
"sdk",
"video",
]
if (rtc_include_builtin_audio_codecs) {
deps += [
"api/audio_codecs:builtin_audio_decoder_factory",
"api/audio_codecs:builtin_audio_encoder_factory",
]
}
if (rtc_include_builtin_video_codecs) {
deps += [
"api/video_codecs:builtin_video_decoder_factory",
"api/video_codecs:builtin_video_encoder_factory",
]
}
if (build_with_mozilla) {
deps += [
"api/video:video_frame",
"api/video:video_rtp_headers",
]
} else {
deps += [
"api",
"logging",
"p2p",
"pc",
"stats",
]
}
if (rtc_enable_protobuf) {
deps += [ "logging:rtc_event_log_proto" ]
}
}
if (rtc_include_tests && !is_asan) {
rtc_executable("webrtc_lib_link_test") {
testonly = true
sources = [ "webrtc_lib_link_test.cc" ]
deps = [
# NOTE: Don't add deps here. If this test fails to link, it means you
# need to add stuff to the webrtc static lib target above.
":webrtc",
]
}
}
}
if (use_libfuzzer || use_afl) {
# This target is only here for gn to discover fuzzer build targets under
# webrtc/test/fuzzers/.
group("webrtc_fuzzers_dummy") {
testonly = true
deps = [ "test/fuzzers:webrtc_fuzzer_main" ]
}
}
if (rtc_include_tests) {
rtc_test("rtc_unittests") {
testonly = true
deps = [
"api:compile_all_headers",
"api:rtc_api_unittests",
"api/audio/test:audio_api_unittests",
"api/audio_codecs/test:audio_codecs_api_unittests",
"api/numerics:numerics_unittests",
"api/transport:stun_unittest",
"api/video/test:rtc_api_video_unittests",
"api/video_codecs/test:video_codecs_api_unittests",
"call:fake_network_pipe_unittests",
"p2p:libstunprober_unittests",
"p2p:rtc_p2p_unittests",
"rtc_base:robo_caller_unittests",
"rtc_base:rtc_base_approved_unittests",
"rtc_base:rtc_base_unittests",
"rtc_base:rtc_json_unittests",
"rtc_base:rtc_numerics_unittests",
"rtc_base:rtc_operations_chain_unittests",
"rtc_base:rtc_task_queue_unittests",
"rtc_base:sigslot_unittest",
"rtc_base:untyped_function_unittest",
"rtc_base:weak_ptr_unittests",
"rtc_base/experiments:experiments_unittests",
"rtc_base/synchronization:sequence_checker_unittests",
"rtc_base/task_utils:pending_task_safety_flag_unittests",
"rtc_base/task_utils:to_queued_task_unittests",
"sdk:sdk_tests",
"test:rtp_test_utils",
"test:test_main",
"test/network:network_emulation_unittests",
]
if (rtc_enable_protobuf) {
deps += [ "logging:rtc_event_log_tests" ]
}
if (is_android) {
# Do not use Chromium's launcher. native_unittests defines its own JNI_OnLoad.
use_default_launcher = false
deps += [
"sdk/android:native_unittests",
"sdk/android:native_unittests_java",
"//testing/android/native_test:native_test_support",
]
shard_timeout = 900
}
if (is_ios || is_mac) {
deps += [ "sdk:rtc_unittests_objc" ]
}
}
rtc_test("benchmarks") {
testonly = true testonly = true
deps = [ deps = [
":webrtc", "rtc_base/synchronization:mutex_benchmark",
"modules/video_render:video_render_internal_impl", "test:benchmark_main",
"modules/video_capture:video_capture_internal_impl",
"test",
]
}
}
source_set("webrtc_common") {
sources = [
"common_types.cc",
"common_types.h",
"config.cc",
"config.h",
"engine_configurations.h",
"typedefs.h",
]
configs += [ ":common_config" ]
public_configs = [ ":common_inherited_config" ]
}
source_set("gtest_prod") {
sources = [
"test/testsupport/gtest_prod_util.h",
] ]
} }
if (rtc_enable_protobuf) { # This runs tests that must run in real time and therefore can take some
proto_library("rtc_event_log_proto") { # time to execute. They are in a separate executable to avoid making the
sources = [ # regular unittest suite too slow to run frequently.
"call/rtc_event_log.proto", rtc_test("slow_tests") {
] testonly = true
proto_out_dir = "webrtc/call"
}
}
source_set("rtc_event_log") {
sources = [
"call/rtc_event_log.cc",
"call/rtc_event_log.h",
]
defines = []
configs += [ ":common_config" ]
public_configs = [ ":common_inherited_config" ]
deps = [ deps = [
":webrtc_common", "rtc_base/task_utils:repeating_task_unittests",
"test:test_main",
]
}
# TODO(pbos): Rename test suite, this is no longer "just" for video targets.
video_engine_tests_resources = [
"resources/foreman_cif_short.yuv",
"resources/voice_engine/audio_long16.pcm",
] ]
if (rtc_enable_protobuf) { if (is_ios) {
defines += [ "ENABLE_RTC_EVENT_LOG" ] bundle_data("video_engine_tests_bundle_data") {
deps += [ ":rtc_event_log_proto" ] testonly = true
} sources = video_engine_tests_resources
if (is_clang && !is_nacl) { outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
# Suppress warnings from Chrome's Clang plugins.
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
configs -= [ "//build/config/clang:find_bad_constructs" ]
} }
} }
rtc_test("video_engine_tests") {
testonly = true
deps = [
"audio:audio_tests",
# TODO(eladalon): call_tests aren't actually video-specific, so we
# should move them to a more appropriate test suite.
"call:call_tests",
"call/adaptation:resource_adaptation_tests",
"test:test_common",
"test:test_main",
"test:video_test_common",
"video:video_tests",
"video/adaptation:video_adaptation_tests",
]
data = video_engine_tests_resources
if (is_android) {
deps += [ "//testing/android/native_test:native_test_native_code" ]
shard_timeout = 900
}
if (is_ios) {
deps += [ ":video_engine_tests_bundle_data" ]
}
}
webrtc_perf_tests_resources = [
"resources/ConferenceMotion_1280_720_50.yuv",
"resources/audio_coding/speech_mono_16kHz.pcm",
"resources/audio_coding/speech_mono_32_48kHz.pcm",
"resources/audio_coding/testfile32kHz.pcm",
"resources/difficult_photo_1850_1110.yuv",
"resources/foreman_cif.yuv",
"resources/paris_qcif.yuv",
"resources/photo_1850_1110.yuv",
"resources/presentation_1850_1110.yuv",
"resources/voice_engine/audio_long16.pcm",
"resources/web_screenshot_1850_1110.yuv",
]
if (is_ios) {
bundle_data("webrtc_perf_tests_bundle_data") {
testonly = true
sources = webrtc_perf_tests_resources
outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
}
}
rtc_test("webrtc_perf_tests") {
testonly = true
deps = [
"audio:audio_perf_tests",
"call:call_perf_tests",
"modules/audio_coding:audio_coding_perf_tests",
"modules/audio_processing:audio_processing_perf_tests",
"pc:peerconnection_perf_tests",
"test:test_main",
"video:video_full_stack_tests",
"video:video_pc_full_stack_tests",
]
data = webrtc_perf_tests_resources
if (is_android) {
deps += [ "//testing/android/native_test:native_test_native_code" ]
shard_timeout = 4500
}
if (is_ios) {
deps += [ ":webrtc_perf_tests_bundle_data" ]
}
}
rtc_test("webrtc_nonparallel_tests") {
testonly = true
deps = [ "rtc_base:rtc_base_nonparallel_tests" ]
if (is_android) {
deps += [ "//testing/android/native_test:native_test_support" ]
shard_timeout = 900
}
}
rtc_test("voip_unittests") {
testonly = true
deps = [
"api/voip:voip_engine_factory_unittests",
"audio/voip/test:audio_channel_unittests",
"audio/voip/test:audio_egress_unittests",
"audio/voip/test:audio_ingress_unittests",
"audio/voip/test:voip_core_unittests",
"test:test_main",
]
}
}
# ---- Poisons ----
#
# Here is one empty dummy target for each poison type (needed because
# "being poisonous with poison type foo" is implemented as "depends on
# //:poison_foo").
#
# The set of poison_* targets needs to be kept in sync with the
# `all_poison_types` list in webrtc.gni.
#
group("poison_audio_codecs") {
}
group("poison_default_task_queue") {
}
group("poison_rtc_json") {
}
group("poison_software_video_codecs") {
}

29
webrtc/LICENSE Normal file
View File

@ -0,0 +1,29 @@
Copyright (c) 2011, The WebRTC project authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of Google nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -1,419 +0,0 @@
This source tree contains third party source code which is governed by third
party licenses. Paths to the files and associated licenses are collected here.
Files governed by third party licenses:
common_audio/fft4g.c
common_audio/signal_processing/spl_sqrt_floor.c
common_audio/signal_processing/spl_sqrt_floor_arm.S
modules/audio_coding/codecs/g711/main/source/g711.c
modules/audio_coding/codecs/g711/main/source/g711.h
modules/audio_coding/codecs/g722/main/source/g722_decode.c
modules/audio_coding/codecs/g722/main/source/g722_enc_dec.h
modules/audio_coding/codecs/g722/main/source/g722_encode.c
modules/audio_coding/codecs/isac/main/source/fft.c
modules/audio_device/mac/portaudio/pa_memorybarrier.h
modules/audio_device/mac/portaudio/pa_ringbuffer.c
modules/audio_device/mac/portaudio/pa_ringbuffer.h
modules/audio_processing/aec/aec_rdft.c
system_wrappers/source/condition_variable_event_win.cc
system_wrappers/source/set_thread_name_win.h
system_wrappers/source/spreadsortlib/constants.hpp
system_wrappers/source/spreadsortlib/spreadsort.hpp
Individual licenses for each file:
-------------------------------------------------------------------------------
Files:
common_audio/signal_processing/spl_sqrt_floor.c
common_audio/signal_processing/spl_sqrt_floor_arm.S
License:
/*
* Written by Wilco Dijkstra, 1996. The following email exchange establishes the
* license.
*
* From: Wilco Dijkstra <Wilco.Dijkstra@ntlworld.com>
* Date: Fri, Jun 24, 2011 at 3:20 AM
* Subject: Re: sqrt routine
* To: Kevin Ma <kma@google.com>
* Hi Kevin,
* Thanks for asking. Those routines are public domain (originally posted to
* comp.sys.arm a long time ago), so you can use them freely for any purpose.
* Cheers,
* Wilco
*
* ----- Original Message -----
* From: "Kevin Ma" <kma@google.com>
* To: <Wilco.Dijkstra@ntlworld.com>
* Sent: Thursday, June 23, 2011 11:44 PM
* Subject: Fwd: sqrt routine
* Hi Wilco,
* I saw your sqrt routine from several web sites, including
* http://www.finesse.demon.co.uk/steven/sqrt.html.
* Just wonder if there's any copyright information with your Successive
* approximation routines, or if I can freely use it for any purpose.
* Thanks.
* Kevin
*/
-------------------------------------------------------------------------------
Files:
modules/audio_coding/codecs/g711/main/source/g711.c
modules/audio_coding/codecs/g711/main/source/g711.h
License:
/*
* SpanDSP - a series of DSP components for telephony
*
* g711.h - In line A-law and u-law conversion routines
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2001 Steve Underwood
*
* Despite my general liking of the GPL, I place this code in the
* public domain for the benefit of all mankind - even the slimy
* ones who might try to proprietize my work and use it to my
* detriment.
*/
-------------------------------------------------------------------------------
Files:
modules/audio_coding/codecs/g722/main/source/g722_decode.c
modules/audio_coding/codecs/g722/main/source/g722_enc_dec.h
modules/audio_coding/codecs/g722/main/source/g722_encode.c
License:
/*
* SpanDSP - a series of DSP components for telephony
*
* g722_decode.c - The ITU G.722 codec, decode part.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2005 Steve Underwood
*
* Despite my general liking of the GPL, I place my own contributions
* to this code in the public domain for the benefit of all mankind -
* even the slimy ones who might try to proprietize my work and use it
* to my detriment.
*
* Based in part on a single channel G.722 codec which is:
*
* Copyright (c) CMU 1993
* Computer Science, Speech Group
* Chengxiang Lu and Alex Hauptmann
*/
-------------------------------------------------------------------------------
Files:
modules/audio_coding/codecs/isac/main/source/fft.c
License:
/*
* Copyright(c)1995,97 Mark Olesen <olesen@me.QueensU.CA>
* Queen's Univ at Kingston (Canada)
*
* Permission to use, copy, modify, and distribute this software for
* any purpose without fee is hereby granted, provided that this
* entire notice is included in all copies of any software which is
* or includes a copy or modification of this software and in all
* copies of the supporting documentation for such software.
*
* THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
* IMPLIED WARRANTY. IN PARTICULAR, NEITHER THE AUTHOR NOR QUEEN'S
* UNIVERSITY AT KINGSTON MAKES ANY REPRESENTATION OR WARRANTY OF ANY
* KIND CONCERNING THE MERCHANTABILITY OF THIS SOFTWARE OR ITS
* FITNESS FOR ANY PARTICULAR PURPOSE.
*
* All of which is to say that you can do what you like with this
* source code provided you don't try to sell it as your own and you
* include an unaltered copy of this message (including the
* copyright).
*
* It is also implicitly understood that bug fixes and improvements
* should make their way back to the general Internet community so
* that everyone benefits.
*/
-------------------------------------------------------------------------------
Files:
modules/audio_device/mac/portaudio/pa_memorybarrier.h
modules/audio_device/mac/portaudio/pa_ringbuffer.c
modules/audio_device/mac/portaudio/pa_ringbuffer.h
License:
/*
* $Id: pa_memorybarrier.h 1240 2007-07-17 13:05:07Z bjornroche $
* Portable Audio I/O Library
* Memory barrier utilities
*
* Author: Bjorn Roche, XO Audio, LLC
*
* This program uses the PortAudio Portable Audio Library.
* For more information see: http://www.portaudio.com
* Copyright (c) 1999-2000 Ross Bencina and Phil Burk
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
* The text above constitutes the entire PortAudio license; however,
* the PortAudio community also makes the following non-binding requests:
*
* Any person wishing to distribute modifications to the Software is
* requested to send the modifications to the original developer so that
* they can be incorporated into the canonical version. It is also
* requested that these non-binding requests be included along with the
* license above.
*/
/*
* $Id: pa_ringbuffer.c 1421 2009-11-18 16:09:05Z bjornroche $
* Portable Audio I/O Library
* Ring Buffer utility.
*
* Author: Phil Burk, http://www.softsynth.com
* modified for SMP safety on Mac OS X by Bjorn Roche
* modified for SMP safety on Linux by Leland Lucius
* also, allowed for const where possible
* modified for multiple-byte-sized data elements by Sven Fischer
*
* Note that this is safe only for a single-thread reader and a
* single-thread writer.
*
* This program uses the PortAudio Portable Audio Library.
* For more information see: http://www.portaudio.com
* Copyright (c) 1999-2000 Ross Bencina and Phil Burk
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
* The text above constitutes the entire PortAudio license; however,
* the PortAudio community also makes the following non-binding requests:
*
* Any person wishing to distribute modifications to the Software is
* requested to send the modifications to the original developer so that
* they can be incorporated into the canonical version. It is also
* requested that these non-binding requests be included along with the
* license above.
*/
-------------------------------------------------------------------------------
Files:
common_audio/fft4g.c
modules/audio_processing/aec/aec_rdft.c
License:
/*
* http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html
* Copyright Takuya OOURA, 1996-2001
*
* You may use, copy, modify and distribute this code for any purpose (include
* commercial use) and without fee. Please refer to this package when you modify
* this code.
*/
-------------------------------------------------------------------------------
Files:
system_wrappers/source/condition_variable_event_win.cc
Source:
http://www1.cse.wustl.edu/~schmidt/ACE-copying.html
License:
Copyright and Licensing Information for ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM),
and CoSMIC(TM)
ACE(TM), TAO(TM), CIAO(TM), DAnCE>(TM), and CoSMIC(TM) (henceforth referred to
as "DOC software") are copyrighted by Douglas C. Schmidt and his research
group at Washington University, University of California, Irvine, and
Vanderbilt University, Copyright (c) 1993-2009, all rights reserved. Since DOC
software is open-source, freely available software, you are free to use,
modify, copy, and distribute--perpetually and irrevocably--the DOC software
source code and object code produced from the source, as well as copy and
distribute modified versions of this software. You must, however, include this
copyright statement along with any code built using DOC software that you
release. No copyright statement needs to be provided if you just ship binary
executables of your software products.
You can use DOC software in commercial and/or binary software releases and are
under no obligation to redistribute any of your source code that is built
using DOC software. Note, however, that you may not misappropriate the DOC
software code, such as copyrighting it yourself or claiming authorship of the
DOC software code, in a way that will prevent DOC software from being
distributed freely using an open-source development model. You needn't inform
anyone that you're using DOC software in your software, though we encourage
you to let us know so we can promote your project in the DOC software success
stories.
The ACE, TAO, CIAO, DAnCE, and CoSMIC web sites are maintained by the DOC
Group at the Institute for Software Integrated Systems (ISIS) and the Center
for Distributed Object Computing of Washington University, St. Louis for the
development of open-source software as part of the open-source software
community. Submissions are provided by the submitter ``as is'' with no
warranties whatsoever, including any warranty of merchantability,
noninfringement of third party intellectual property, or fitness for any
particular purpose. In no event shall the submitter be liable for any direct,
indirect, special, exemplary, punitive, or consequential damages, including
without limitation, lost profits, even if advised of the possibility of such
damages. Likewise, DOC software is provided as is with no warranties of any
kind, including the warranties of design, merchantability, and fitness for a
particular purpose, noninfringement, or arising from a course of dealing,
usage or trade practice. Washington University, UC Irvine, Vanderbilt
University, their employees, and students shall have no liability with respect
to the infringement of copyrights, trade secrets or any patents by DOC
software or any part thereof. Moreover, in no event will Washington
University, UC Irvine, or Vanderbilt University, their employees, or students
be liable for any lost revenue or profits or other special, indirect and
consequential damages.
DOC software is provided with no support and without any obligation on the
part of Washington University, UC Irvine, Vanderbilt University, their
employees, or students to assist in its use, correction, modification, or
enhancement. A number of companies around the world provide commercial support
for DOC software, however. DOC software is Y2K-compliant, as long as the
underlying OS platform is Y2K-compliant. Likewise, DOC software is compliant
with the new US daylight savings rule passed by Congress as "The Energy Policy
Act of 2005," which established new daylight savings times (DST) rules for the
United States that expand DST as of March 2007. Since DOC software obtains
time/date and calendaring information from operating systems users will not be
affected by the new DST rules as long as they upgrade their operating systems
accordingly.
The names ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), CoSMIC(TM), Washington
University, UC Irvine, and Vanderbilt University, may not be used to endorse
or promote products or services derived from this source without express
written permission from Washington University, UC Irvine, or Vanderbilt
University. This license grants no permission to call products or services
derived from this source ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), or CoSMIC(TM),
nor does it grant permission for the name Washington University, UC Irvine, or
Vanderbilt University to appear in their names.
-------------------------------------------------------------------------------
Files:
system_wrappers/source/set_thread_name_win.h
Source:
http://msdn.microsoft.com/en-us/cc300389.aspx#P
License:
This license governs use of code marked as “sample” or “example” available on
this web site without a license agreement, as provided under the section above
titled “NOTICE SPECIFIC TO SOFTWARE AVAILABLE ON THIS WEB SITE.” If you use
such code (the “software”), you accept this license. If you do not accept the
license, do not use the software.
1. Definitions
The terms “reproduce,” “reproduction,” “derivative works,” and “distribution”
have the same meaning here as under U.S. copyright law.
A “contribution” is the original software, or any additions or changes to the
software.
A “contributor” is any person that distributes its contribution under this
license.
“Licensed patents” are a contributors patent claims that read directly on its
contribution.
2. Grant of Rights
(A) Copyright Grant - Subject to the terms of this license, including the
license conditions and limitations in section 3, each contributor grants you a
non-exclusive, worldwide, royalty-free copyright license to reproduce its
contribution, prepare derivative works of its contribution, and distribute its
contribution or any derivative works that you create.
(B) Patent Grant - Subject to the terms of this license, including the license
conditions and limitations in section 3, each contributor grants you a
non-exclusive, worldwide, royalty-free license under its licensed patents to
make, have made, use, sell, offer for sale, import, and/or otherwise dispose
of its contribution in the software or derivative works of the contribution in
the software.
3. Conditions and Limitations
(A) No Trademark License- This license does not grant you rights to use any
contributors name, logo, or trademarks.
(B) If you bring a patent claim against any contributor over patents that you
claim are infringed by the software, your patent license from such contributor
to the software ends automatically.
(C) If you distribute any portion of the software, you must retain all
copyright, patent, trademark, and attribution notices that are present in the
software.
(D) If you distribute any portion of the software in source code form, you may
do so only under this license by including a complete copy of this license
with your distribution. If you distribute any portion of the software in
compiled or object code form, you may only do so under a license that complies
with this license.
(E) The software is licensed “as-is.” You bear the risk of using it. The
contributors give no express warranties, guarantees or conditions. You may
have additional consumer rights under your local laws which this license
cannot change. To the extent permitted under your local laws, the contributors
exclude the implied warranties of merchantability, fitness for a particular
purpose and non-infringement.
(F) Platform Limitation - The licenses granted in sections 2(A) and 2(B)
extend only to the software or derivative works that you create that run on a
Microsoft Windows operating system product.
-------------------------------------------------------------------------------
Files:
system_wrappers/source/spreadsortlib/constants.hpp
system_wrappers/source/spreadsortlib/spreadsort.hpp
License:
/*Boost Software License - Version 1.0 - August 17th, 2003
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.*/

315
webrtc/api/array_view.h Normal file
View File

@ -0,0 +1,315 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_ARRAY_VIEW_H_
#define API_ARRAY_VIEW_H_
#include <algorithm>
#include <array>
#include <type_traits>
#include "rtc_base/checks.h"
#include "rtc_base/type_traits.h"
namespace rtc {
// tl;dr: rtc::ArrayView is the same thing as gsl::span from the Guideline
// Support Library.
//
// Many functions read from or write to arrays. The obvious way to do this is
// to use two arguments, a pointer to the first element and an element count:
//
// bool Contains17(const int* arr, size_t size) {
// for (size_t i = 0; i < size; ++i) {
// if (arr[i] == 17)
// return true;
// }
// return false;
// }
//
// This is flexible, since it doesn't matter how the array is stored (C array,
// std::vector, rtc::Buffer, ...), but it's error-prone because the caller has
// to correctly specify the array length:
//
// Contains17(arr, arraysize(arr)); // C array
// Contains17(arr.data(), arr.size()); // std::vector
// Contains17(arr, size); // pointer + size
// ...
//
// It's also kind of messy to have two separate arguments for what is
// conceptually a single thing.
//
// Enter rtc::ArrayView<T>. It contains a T pointer (to an array it doesn't
// own) and a count, and supports the basic things you'd expect, such as
// indexing and iteration. It allows us to write our function like this:
//
// bool Contains17(rtc::ArrayView<const int> arr) {
// for (auto e : arr) {
// if (e == 17)
// return true;
// }
// return false;
// }
//
// And even better, because a bunch of things will implicitly convert to
// ArrayView, we can call it like this:
//
// Contains17(arr); // C array
// Contains17(arr); // std::vector
// Contains17(rtc::ArrayView<int>(arr, size)); // pointer + size
// Contains17(nullptr); // nullptr -> empty ArrayView
// ...
//
// ArrayView<T> stores both a pointer and a size, but you may also use
// ArrayView<T, N>, which has a size that's fixed at compile time (which means
// it only has to store the pointer).
//
// One important point is that ArrayView<T> and ArrayView<const T> are
// different types, which allow and don't allow mutation of the array elements,
// respectively. The implicit conversions work just like you'd hope, so that
// e.g. vector<int> will convert to either ArrayView<int> or ArrayView<const
// int>, but const vector<int> will convert only to ArrayView<const int>.
// (ArrayView itself can be the source type in such conversions, so
// ArrayView<int> will convert to ArrayView<const int>.)
//
// Note: ArrayView is tiny (just a pointer and a count if variable-sized, just
// a pointer if fix-sized) and trivially copyable, so it's probably cheaper to
// pass it by value than by const reference.
namespace impl {
// Magic constant for indicating that the size of an ArrayView is variable
// instead of fixed.
enum : std::ptrdiff_t { kArrayViewVarSize = -4711 };
// Base class for ArrayViews of fixed nonzero size.
template <typename T, std::ptrdiff_t Size>
class ArrayViewBase {
  // The primary template only serves positive sizes; Size == 0 and
  // variable size (kArrayViewVarSize) each have a specialization below.
  static_assert(Size > 0, "ArrayView size must be variable or non-negative");

 public:
  // |size| is unused here: for a fixed-size view the element count is the
  // compile-time constant Size, so only the pointer is stored.
  ArrayViewBase(T* data, size_t size) : data_(data) {}

  static constexpr size_t size() { return Size; }
  static constexpr bool empty() { return false; }
  T* data() const { return data_; }

 protected:
  static constexpr bool fixed_size() { return true; }

 private:
  T* data_;
};
// Specialized base class for ArrayViews of fixed zero size.
template <typename T>
class ArrayViewBase<T, 0> {
 public:
  // Both arguments are ignored: a zero-sized view stores no state at all,
  // which is what makes ArrayView<T, 0> an empty type (checked by the
  // std::is_empty static_assert later in this header).
  explicit ArrayViewBase(T* data, size_t size) {}

  static constexpr size_t size() { return 0; }
  static constexpr bool empty() { return true; }
  T* data() const { return nullptr; }

 protected:
  static constexpr bool fixed_size() { return true; }
};
// Specialized base class for ArrayViews of variable size.
template <typename T>
class ArrayViewBase<T, impl::kArrayViewVarSize> {
 public:
  // An empty view is normalized to a null data pointer, preserving the
  // invariant that data() is null iff size() == 0.
  ArrayViewBase(T* data, size_t size)
      : data_(size == 0 ? nullptr : data), size_(size) {}

  size_t size() const { return size_; }
  bool empty() const { return size_ == 0; }
  T* data() const { return data_; }

 protected:
  static constexpr bool fixed_size() { return false; }

 private:
  T* data_;
  size_t size_;
};
} // namespace impl
template <typename T, std::ptrdiff_t Size = impl::kArrayViewVarSize>
class ArrayView final : public impl::ArrayViewBase<T, Size> {
 public:
  using value_type = T;
  using const_iterator = const T*;

  // Construct an ArrayView from a pointer and a length.
  template <typename U>
  ArrayView(U* data, size_t size)
      : impl::ArrayViewBase<T, Size>::ArrayViewBase(data, size) {
    // For fixed-size views the base class discards |size|, so these checks
    // verify that the caller-supplied values agree with what the base
    // actually stores.
    RTC_DCHECK_EQ(size == 0 ? nullptr : data, this->data());
    RTC_DCHECK_EQ(size, this->size());
    RTC_DCHECK_EQ(!this->data(),
                  this->size() == 0);  // data is null iff size == 0.
  }

  // Construct an empty ArrayView. Note that fixed-size ArrayViews of size > 0
  // cannot be empty.
  ArrayView() : ArrayView(nullptr, 0) {}
  ArrayView(std::nullptr_t)  // NOLINT
      : ArrayView() {}
  ArrayView(std::nullptr_t, size_t size)
      : ArrayView(static_cast<T*>(nullptr), size) {
    // A null pointer is only valid for an empty view.
    static_assert(Size == 0 || Size == impl::kArrayViewVarSize, "");
    RTC_DCHECK_EQ(0, size);
  }

  // Construct an ArrayView from a C-style array.
  template <typename U, size_t N>
  ArrayView(U (&array)[N])  // NOLINT
      : ArrayView(array, N) {
    static_assert(Size == N || Size == impl::kArrayViewVarSize,
                  "Array size must match ArrayView size");
  }

  // (Only if size is fixed.) Construct a fixed size ArrayView<T, N> from a
  // non-const std::array instance. For an ArrayView with variable size, the
  // used ctor is ArrayView(U& u) instead.
  template <typename U,
            size_t N,
            typename std::enable_if<
                Size == static_cast<std::ptrdiff_t>(N)>::type* = nullptr>
  ArrayView(std::array<U, N>& u)  // NOLINT
      : ArrayView(u.data(), u.size()) {}

  // (Only if size is fixed.) Construct a fixed size ArrayView<T, N> where T is
  // const from a const(expr) std::array instance. For an ArrayView with
  // variable size, the used ctor is ArrayView(U& u) instead.
  template <typename U,
            size_t N,
            typename std::enable_if<
                Size == static_cast<std::ptrdiff_t>(N)>::type* = nullptr>
  ArrayView(const std::array<U, N>& u)  // NOLINT
      : ArrayView(u.data(), u.size()) {}

  // (Only if size is fixed.) Construct an ArrayView from any type U that has a
  // static constexpr size() method whose return value is equal to Size, and a
  // data() method whose return value converts implicitly to T*. In particular,
  // this means we allow conversion from ArrayView<T, N> to ArrayView<const T,
  // N>, but not the other way around. We also don't allow conversion from
  // ArrayView<T> to ArrayView<T, N>, or from ArrayView<T, M> to ArrayView<T,
  // N> when M != N.
  template <
      typename U,
      typename std::enable_if<Size != impl::kArrayViewVarSize &&
                              HasDataAndSize<U, T>::value>::type* = nullptr>
  ArrayView(U& u)  // NOLINT
      : ArrayView(u.data(), u.size()) {
    static_assert(U::size() == Size, "Sizes must match exactly");
  }
  template <
      typename U,
      typename std::enable_if<Size != impl::kArrayViewVarSize &&
                              HasDataAndSize<U, T>::value>::type* = nullptr>
  ArrayView(const U& u)  // NOLINT(runtime/explicit)
      : ArrayView(u.data(), u.size()) {
    static_assert(U::size() == Size, "Sizes must match exactly");
  }

  // (Only if size is variable.) Construct an ArrayView from any type U that
  // has a size() method whose return value converts implicitly to size_t, and
  // a data() method whose return value converts implicitly to T*. In
  // particular, this means we allow conversion from ArrayView<T> to
  // ArrayView<const T>, but not the other way around. Other allowed
  // conversions include
  // ArrayView<T, N> to ArrayView<T> or ArrayView<const T>,
  // std::vector<T> to ArrayView<T> or ArrayView<const T>,
  // const std::vector<T> to ArrayView<const T>,
  // rtc::Buffer to ArrayView<uint8_t> or ArrayView<const uint8_t>, and
  // const rtc::Buffer to ArrayView<const uint8_t>.
  template <
      typename U,
      typename std::enable_if<Size == impl::kArrayViewVarSize &&
                              HasDataAndSize<U, T>::value>::type* = nullptr>
  ArrayView(U& u)  // NOLINT
      : ArrayView(u.data(), u.size()) {}
  template <
      typename U,
      typename std::enable_if<Size == impl::kArrayViewVarSize &&
                              HasDataAndSize<U, T>::value>::type* = nullptr>
  ArrayView(const U& u)  // NOLINT(runtime/explicit)
      : ArrayView(u.data(), u.size()) {}

  // Indexing and iteration. These allow mutation even if the ArrayView is
  // const, because the ArrayView doesn't own the array. (To prevent mutation,
  // use a const element type.)
  T& operator[](size_t idx) const {
    RTC_DCHECK_LT(idx, this->size());
    RTC_DCHECK(this->data());
    return this->data()[idx];
  }
  T* begin() const { return this->data(); }
  T* end() const { return this->data() + this->size(); }
  const T* cbegin() const { return this->data(); }
  const T* cend() const { return this->data() + this->size(); }

  // Returns a variable-size view of the elements [offset, offset + size).
  // The requested size is clamped to the elements actually available; an
  // |offset| at or past the end yields an empty view.
  ArrayView<T> subview(size_t offset, size_t size) const {
    return offset < this->size()
               ? ArrayView<T>(this->data() + offset,
                              std::min(size, this->size() - offset))
               : ArrayView<T>();
  }
  // Returns a view of everything from |offset| to the end.
  ArrayView<T> subview(size_t offset) const {
    return subview(offset, this->size());
  }
};
// Equality on ArrayViews is shallow: two views compare equal when they refer
// to the same memory region (identical pointer and element count). The
// pointed-to elements are never dereferenced or compared.
template <typename T, std::ptrdiff_t Size1, std::ptrdiff_t Size2>
bool operator==(const ArrayView<T, Size1>& lhs, const ArrayView<T, Size2>& rhs) {
  const bool same_data = lhs.data() == rhs.data();
  const bool same_size = lhs.size() == rhs.size();
  return same_data && same_size;
}
template <typename T, std::ptrdiff_t Size1, std::ptrdiff_t Size2>
bool operator!=(const ArrayView<T, Size1>& lhs, const ArrayView<T, Size2>& rhs) {
  return !(lhs == rhs);
}
// Variable-size ArrayViews are the size of two pointers; fixed-size ArrayViews
// are the size of one pointer. (And as a special case, fixed-size ArrayViews
// of size 0 require no storage.)
static_assert(sizeof(ArrayView<int>) == 2 * sizeof(int*), "");
static_assert(sizeof(ArrayView<int, 17>) == sizeof(int*), "");
static_assert(std::is_empty<ArrayView<int, 0>>::value, "");
// Convenience factory that deduces the element type, so callers can write
// MakeArrayView(ptr, n) instead of spelling out ArrayView<T>(ptr, n).
template <typename T>
inline ArrayView<T> MakeArrayView(T* data, size_t size) {
  ArrayView<T> view(data, size);
  return view;
}
// Reinterprets an ArrayView<T> as an ArrayView<U> over the same memory.
// Restricted to fundamental types with identical size and alignment, so the
// reinterpretation cannot change the extent or layout of the viewed region.
// The template parameter order is (U, T, Size) so that T and Size are
// deduced in client calls: reinterpret_array_view<target_type>(array_view).
template <typename U, typename T, std::ptrdiff_t Size>
inline ArrayView<U, Size> reinterpret_array_view(ArrayView<T, Size> view) {
  static_assert(
      std::is_fundamental<T>::value && std::is_fundamental<U>::value,
      "ArrayView reinterpret_cast is only supported for casting between "
      "fundamental types.");
  static_assert(sizeof(U) == sizeof(T) && alignof(U) == alignof(T),
                "ArrayView reinterpret_cast is only supported for casting "
                "between views that represent the same chunk of memory.");
  U* const target = reinterpret_cast<U*>(view.data());
  return ArrayView<U, Size>(target, view.size());
}
} // namespace rtc
#endif // API_ARRAY_VIEW_H_

View File

@ -0,0 +1,164 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/audio/audio_frame.h"
#include <string.h>
#include <algorithm>
#include <utility>
#include "rtc_base/checks.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
// Default constructor. The body only holds a compile-time consistency check
// between the sample buffer and its declared byte size; member values are
// presumably set by in-class initializers in the header — confirm there.
AudioFrame::AudioFrame() {
  // Visual Studio doesn't like this in the class definition.
  static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes");
}
// ADL-visible swap for AudioFrame: exchanges all metadata members and the
// audio payloads of |a| and |b|.
void swap(AudioFrame& a, AudioFrame& b) {
  using std::swap;
  swap(a.timestamp_, b.timestamp_);
  swap(a.elapsed_time_ms_, b.elapsed_time_ms_);
  swap(a.ntp_time_ms_, b.ntp_time_ms_);
  swap(a.samples_per_channel_, b.samples_per_channel_);
  swap(a.sample_rate_hz_, b.sample_rate_hz_);
  swap(a.num_channels_, b.num_channels_);
  swap(a.channel_layout_, b.channel_layout_);
  swap(a.speech_type_, b.speech_type_);
  swap(a.vad_activity_, b.vad_activity_);
  swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_);
  swap(a.packet_infos_, b.packet_infos_);
  // The metadata was swapped above, so length_a is the payload length now
  // associated with |a| (i.e. |b|'s original payload), and vice versa.
  const size_t length_a = a.samples_per_channel_ * a.num_channels_;
  const size_t length_b = b.samples_per_channel_ * b.num_channels_;
  RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples);
  RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples);
  // Swapping max(length_a, length_b) elements covers both payloads whichever
  // frame holds more samples; both buffers are kMaxDataSizeSamples long, so
  // the range stays in bounds.
  std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_);
  swap(a.muted_, b.muted_);
  swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_);
}
// Restores the frame to its default state and marks its payload as muted.
void AudioFrame::Reset() {
  // Mute the payload in addition to the metadata reset performed by
  // ResetWithoutMuting(), which deliberately leaves |muted_| alone.
  muted_ = true;
  ResetWithoutMuting();
}
// Resets all metadata to default values while leaving |muted_| untouched, so
// callers can clear a frame without changing its mute state. Reset() builds
// on this and additionally mutes the frame.
void AudioFrame::ResetWithoutMuting() {
  // TODO(wu): Zero is a valid value for |timestamp_|. We should initialize
  // to an invalid value, or add a new member to indicate invalidity.
  timestamp_ = 0;
  elapsed_time_ms_ = -1;
  ntp_time_ms_ = -1;
  samples_per_channel_ = 0;
  sample_rate_hz_ = 0;
  num_channels_ = 0;
  channel_layout_ = CHANNEL_LAYOUT_NONE;
  speech_type_ = kUndefined;
  vad_activity_ = kVadUnknown;
  profile_timestamp_ms_ = 0;
  packet_infos_ = RtpPacketInfos();
  absolute_capture_timestamp_ms_ = absl::nullopt;
}
// Overwrites the frame's metadata and, when |data| is non-null, copies the
// interleaved samples into the internal buffer. Passing a null |data|
// marks the frame muted instead of copying anything.
void AudioFrame::UpdateFrame(uint32_t timestamp,
                             const int16_t* data,
                             size_t samples_per_channel,
                             int sample_rate_hz,
                             SpeechType speech_type,
                             VADActivity vad_activity,
                             size_t num_channels) {
  timestamp_ = timestamp;
  samples_per_channel_ = samples_per_channel;
  sample_rate_hz_ = sample_rate_hz;
  speech_type_ = speech_type;
  vad_activity_ = vad_activity;
  num_channels_ = num_channels;
  // Derive the layout from the channel count; when a known layout is found
  // it must agree with the count that produced it.
  channel_layout_ = GuessChannelLayout(num_channels);
  if (channel_layout_ != CHANNEL_LAYOUT_UNSUPPORTED) {
    RTC_DCHECK_EQ(num_channels, ChannelLayoutToChannelCount(channel_layout_));
  }
  const size_t total_samples = samples_per_channel * num_channels;
  RTC_CHECK_LE(total_samples, kMaxDataSizeSamples);
  muted_ = (data == nullptr);
  if (!muted_) {
    memcpy(data_, data, sizeof(int16_t) * total_samples);
  }
}
// Deep-copies |src| into this frame. Sample data is only copied when the
// source is unmuted; for a muted source only the mute flag is taken over
// and data() will serve the shared zero buffer. Self-copy is a no-op.
void AudioFrame::CopyFrom(const AudioFrame& src) {
  if (this == &src)
    return;

  timestamp_ = src.timestamp_;
  elapsed_time_ms_ = src.elapsed_time_ms_;
  ntp_time_ms_ = src.ntp_time_ms_;
  packet_infos_ = src.packet_infos_;
  muted_ = src.muted();
  samples_per_channel_ = src.samples_per_channel_;
  sample_rate_hz_ = src.sample_rate_hz_;
  speech_type_ = src.speech_type_;
  vad_activity_ = src.vad_activity_;
  num_channels_ = src.num_channels_;
  channel_layout_ = src.channel_layout_;
  absolute_capture_timestamp_ms_ = src.absolute_capture_timestamp_ms();

  const size_t sample_count = samples_per_channel_ * num_channels_;
  RTC_CHECK_LE(sample_count, kMaxDataSizeSamples);
  if (!src.muted()) {
    memcpy(data_, src.data(), sizeof(int16_t) * sample_count);
    muted_ = false;
  }
}
// Records the current wall-clock time as the profiling reference point for
// a later ElapsedProfileTimeMs() call.
void AudioFrame::UpdateProfileTimeStamp() {
  profile_timestamp_ms_ = rtc::TimeMillis();
}
// Returns the wall-clock time elapsed since UpdateProfileTimeStamp() was
// last called, or -1 when profiling was never activated for this frame.
int64_t AudioFrame::ElapsedProfileTimeMs() const {
  // A zero reference timestamp means profiling has not been activated.
  return profile_timestamp_ms_ == 0 ? -1
                                    : rtc::TimeSince(profile_timestamp_ms_);
}
// Read-only access to the samples. Muted frames are served from a shared,
// permanently zeroed buffer, so callers always observe silence.
const int16_t* AudioFrame::data() const {
  if (muted_) {
    return empty_data();
  }
  return data_;
}
// TODO(henrik.lundin) Can we skip zeroing the buffer?
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5647.
// Writable access to the samples. If the frame was muted, the internal
// buffer is zeroed first and the frame becomes unmuted, so callers always
// start from silence.
int16_t* AudioFrame::mutable_data() {
  if (muted_) {
    memset(data_, 0, kMaxDataSizeBytes);
    muted_ = false;
  }
  return data_;
}
// Marks the frame muted; data() will serve zeros until the frame is
// unmuted by mutable_data(), UpdateFrame() or CopyFrom().
void AudioFrame::Mute() {
  muted_ = true;
}
// Returns whether the frame is currently muted (true for a fresh frame).
bool AudioFrame::muted() const {
  return muted_;
}
// static
const int16_t* AudioFrame::empty_data() {
  // Zero-initialized buffer shared by all muted frames. It is allocated once
  // and deliberately never freed, which avoids static-destruction-order
  // issues at process exit.
  static int16_t* null_data = new int16_t[kMaxDataSizeSamples]();
  return &null_data[0];
}
} // namespace webrtc

View File

@ -0,0 +1,177 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_AUDIO_FRAME_H_
#define API_AUDIO_AUDIO_FRAME_H_
#include <stddef.h>
#include <stdint.h>
#include <utility>
#include "api/audio/channel_layout.h"
#include "api/rtp_packet_infos.h"
#include "rtc_base/constructor_magic.h"
namespace webrtc {
/* This class holds up to 120 ms of super-wideband (32 kHz) stereo audio. It
* allows for adding and subtracting frames while keeping track of the resulting
* states.
*
* Notes
* - This is a de-facto api, not designed for external use. The AudioFrame class
* is in need of overhaul or even replacement, and anyone depending on it
* should be prepared for that.
* - The total number of samples is samples_per_channel_ * num_channels_.
* - Stereo data is interleaved starting with the left channel.
*/
class AudioFrame {
 public:
  // Using constexpr here causes linker errors unless the variable also has an
  // out-of-class definition, which is impractical in this header-only class.
  // (This makes no sense because it compiles as an enum value, which we most
  // certainly cannot take the address of, just fine.) C++17 introduces inline
  // variables which should allow us to switch to constexpr and keep this a
  // header-only class.
  enum : size_t {
    // Stereo, 32 kHz, 120 ms (2 * 32 * 120)
    // Stereo, 192 kHz, 20 ms (2 * 192 * 20)
    kMaxDataSizeSamples = 7680,
    kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t),
  };

  enum VADActivity { kVadActive = 0, kVadPassive = 1, kVadUnknown = 2 };
  enum SpeechType {
    kNormalSpeech = 0,
    kPLC = 1,
    kCNG = 2,
    kPLCCNG = 3,
    kCodecPLC = 5,
    kUndefined = 4
  };

  AudioFrame();

  // Swaps the complete state of two frames, including sample buffers.
  friend void swap(AudioFrame& a, AudioFrame& b);

  // Resets all members to their default state.
  void Reset();
  // Same as Reset(), but leaves mute state unchanged. Muting a frame requires
  // the buffer to be zeroed on the next call to mutable_data(). Callers
  // intending to write to the buffer immediately after Reset() can instead use
  // ResetWithoutMuting() to skip this wasteful zeroing.
  void ResetWithoutMuting();

  // Overwrites metadata and copies |data| into the frame; a null |data|
  // marks the frame muted instead.
  void UpdateFrame(uint32_t timestamp,
                   const int16_t* data,
                   size_t samples_per_channel,
                   int sample_rate_hz,
                   SpeechType speech_type,
                   VADActivity vad_activity,
                   size_t num_channels = 1);

  // Deep-copies |src|; sample data is copied only when |src| is unmuted.
  void CopyFrom(const AudioFrame& src);

  // Sets a wall-time clock timestamp in milliseconds to be used for profiling
  // of time between two points in the audio chain.
  // Example:
  //   t0: UpdateProfileTimeStamp()
  //   t1: ElapsedProfileTimeMs() => t1 - t0 [msec]
  void UpdateProfileTimeStamp();
  // Returns the time difference between now and when UpdateProfileTimeStamp()
  // was last called. Returns -1 if UpdateProfileTimeStamp() has not yet been
  // called.
  int64_t ElapsedProfileTimeMs() const;

  // data() returns a zeroed static buffer if the frame is muted.
  // mutable_data() always returns a non-static buffer; the first call to
  // mutable_data() zeros the non-static buffer and marks the frame unmuted.
  const int16_t* data() const;
  int16_t* mutable_data();

  // Prefer to mute frames using AudioFrameOperations::Mute.
  void Mute();
  // Frame is muted by default.
  bool muted() const;

  size_t max_16bit_samples() const { return kMaxDataSizeSamples; }
  size_t samples_per_channel() const { return samples_per_channel_; }
  size_t num_channels() const { return num_channels_; }
  ChannelLayout channel_layout() const { return channel_layout_; }
  int sample_rate_hz() const { return sample_rate_hz_; }

  void set_absolute_capture_timestamp_ms(
      int64_t absolute_capture_time_stamp_ms) {
    absolute_capture_timestamp_ms_ = absolute_capture_time_stamp_ms;
  }

  absl::optional<int64_t> absolute_capture_timestamp_ms() const {
    return absolute_capture_timestamp_ms_;
  }

  // RTP timestamp of the first sample in the AudioFrame.
  uint32_t timestamp_ = 0;
  // Time since the first frame in milliseconds.
  // -1 represents an uninitialized value.
  int64_t elapsed_time_ms_ = -1;
  // NTP time of the estimated capture time in local timebase in milliseconds.
  // -1 represents an uninitialized value.
  int64_t ntp_time_ms_ = -1;
  size_t samples_per_channel_ = 0;
  int sample_rate_hz_ = 0;
  size_t num_channels_ = 0;
  ChannelLayout channel_layout_ = CHANNEL_LAYOUT_NONE;
  SpeechType speech_type_ = kUndefined;
  VADActivity vad_activity_ = kVadUnknown;
  // Monotonically increasing timestamp intended for profiling of audio frames.
  // Typically used for measuring elapsed time between two different points in
  // the audio path. No lock is used to save resources and we are thread safe
  // by design.
  // TODO(nisse@webrtc.org): consider using absl::optional.
  int64_t profile_timestamp_ms_ = 0;

  // Information about packets used to assemble this audio frame. This is needed
  // by |SourceTracker| when the frame is delivered to the RTCRtpReceiver's
  // MediaStreamTrack, in order to implement getContributingSources(). See:
  // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources
  //
  // TODO(bugs.webrtc.org/10757):
  //   Note that this information might not be fully accurate since we
  //   currently don't have a proper way to track it across the audio sync
  //   buffer. The sync buffer is the small sample-holding buffer located
  //   after the audio decoder and before where samples are assembled into
  //   output frames.
  //
  // |RtpPacketInfos| may also be empty if the audio samples did not come from
  // RTP packets. E.g. if the audio were locally generated by packet loss
  // concealment, comfort noise generation, etc.
  RtpPacketInfos packet_infos_;

 private:
  // A permanently zeroed out buffer to represent muted frames. This is a
  // header-only class, so the only way to avoid creating a separate empty
  // buffer per translation unit is to wrap a static in an inline function.
  static const int16_t* empty_data();

  int16_t data_[kMaxDataSizeSamples];
  bool muted_ = true;

  // Absolute capture timestamp when this audio frame was originally captured.
  // This is only valid for audio frames captured on this machine. The absolute
  // capture timestamp of a received frame is found in |packet_infos_|.
  // This timestamp MUST be based on the same clock as rtc::TimeMillis().
  absl::optional<int64_t> absolute_capture_timestamp_ms_;

  RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame);
};
} // namespace webrtc
#endif // API_AUDIO_AUDIO_FRAME_H_

View File

@ -0,0 +1,282 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/audio/channel_layout.h"
#include <stddef.h>
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
namespace webrtc {
// Number of channels for each ChannelLayout, indexed by the enum value.
// Must be kept in sync with the ChannelLayout enum; entries of 0 mark
// layouts with no fixed channel count (NONE, UNSUPPORTED, DISCRETE,
// BITSTREAM).
static const int kLayoutToChannels[] = {
    0,  // CHANNEL_LAYOUT_NONE
    0,  // CHANNEL_LAYOUT_UNSUPPORTED
    1,  // CHANNEL_LAYOUT_MONO
    2,  // CHANNEL_LAYOUT_STEREO
    3,  // CHANNEL_LAYOUT_2_1
    3,  // CHANNEL_LAYOUT_SURROUND
    4,  // CHANNEL_LAYOUT_4_0
    4,  // CHANNEL_LAYOUT_2_2
    4,  // CHANNEL_LAYOUT_QUAD
    5,  // CHANNEL_LAYOUT_5_0
    6,  // CHANNEL_LAYOUT_5_1
    5,  // CHANNEL_LAYOUT_5_0_BACK
    6,  // CHANNEL_LAYOUT_5_1_BACK
    7,  // CHANNEL_LAYOUT_7_0
    8,  // CHANNEL_LAYOUT_7_1
    8,  // CHANNEL_LAYOUT_7_1_WIDE
    2,  // CHANNEL_LAYOUT_STEREO_DOWNMIX
    3,  // CHANNEL_LAYOUT_2POINT1
    4,  // CHANNEL_LAYOUT_3_1
    5,  // CHANNEL_LAYOUT_4_1
    6,  // CHANNEL_LAYOUT_6_0
    6,  // CHANNEL_LAYOUT_6_0_FRONT
    6,  // CHANNEL_LAYOUT_HEXAGONAL
    7,  // CHANNEL_LAYOUT_6_1
    7,  // CHANNEL_LAYOUT_6_1_BACK
    7,  // CHANNEL_LAYOUT_6_1_FRONT
    7,  // CHANNEL_LAYOUT_7_0_FRONT
    8,  // CHANNEL_LAYOUT_7_1_WIDE_BACK
    8,  // CHANNEL_LAYOUT_OCTAGONAL
    0,  // CHANNEL_LAYOUT_DISCRETE
    3,  // CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC
    5,  // CHANNEL_LAYOUT_4_1_QUAD_SIDE
    0,  // CHANNEL_LAYOUT_BITSTREAM
};
// The channel orderings for each layout as specified by FFmpeg. Each value
// represents the index of each channel in each layout. Values of -1 mean the
// channel at that index is not used for that layout. For example, the left side
// surround sound channel in FFmpeg's 5.1 layout is in the 5th position (because
// the order is L, R, C, LFE, LS, RS), so
// kChannelOrderings[CHANNEL_LAYOUT_5_1][SIDE_LEFT] = 4;
// Rows must stay in ChannelLayout enum order; columns in Channels enum order.
static const int kChannelOrderings[CHANNEL_LAYOUT_MAX + 1][CHANNELS_MAX + 1] = {
    // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR

    // CHANNEL_LAYOUT_NONE
    {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_UNSUPPORTED
    {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_MONO
    {-1, -1, 0, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_STEREO
    {0, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_2_1
    {0, 1, -1, -1, -1, -1, -1, -1, 2, -1, -1},

    // CHANNEL_LAYOUT_SURROUND
    {0, 1, 2, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_4_0
    {0, 1, 2, -1, -1, -1, -1, -1, 3, -1, -1},

    // CHANNEL_LAYOUT_2_2
    {0, 1, -1, -1, -1, -1, -1, -1, -1, 2, 3},

    // CHANNEL_LAYOUT_QUAD
    {0, 1, -1, -1, 2, 3, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_5_0
    {0, 1, 2, -1, -1, -1, -1, -1, -1, 3, 4},

    // CHANNEL_LAYOUT_5_1
    {0, 1, 2, 3, -1, -1, -1, -1, -1, 4, 5},

    // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR

    // CHANNEL_LAYOUT_5_0_BACK
    {0, 1, 2, -1, 3, 4, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_5_1_BACK
    {0, 1, 2, 3, 4, 5, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_7_0
    {0, 1, 2, -1, 5, 6, -1, -1, -1, 3, 4},

    // CHANNEL_LAYOUT_7_1
    {0, 1, 2, 3, 6, 7, -1, -1, -1, 4, 5},

    // CHANNEL_LAYOUT_7_1_WIDE
    {0, 1, 2, 3, -1, -1, 6, 7, -1, 4, 5},

    // CHANNEL_LAYOUT_STEREO_DOWNMIX
    {0, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_2POINT1
    {0, 1, -1, 2, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_3_1
    {0, 1, 2, 3, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_4_1
    {0, 1, 2, 4, -1, -1, -1, -1, 3, -1, -1},

    // CHANNEL_LAYOUT_6_0
    {0, 1, 2, -1, -1, -1, -1, -1, 5, 3, 4},

    // CHANNEL_LAYOUT_6_0_FRONT
    {0, 1, -1, -1, -1, -1, 4, 5, -1, 2, 3},

    // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR

    // CHANNEL_LAYOUT_HEXAGONAL
    {0, 1, 2, -1, 3, 4, -1, -1, 5, -1, -1},

    // CHANNEL_LAYOUT_6_1
    {0, 1, 2, 3, -1, -1, -1, -1, 6, 4, 5},

    // CHANNEL_LAYOUT_6_1_BACK
    {0, 1, 2, 3, 4, 5, -1, -1, 6, -1, -1},

    // CHANNEL_LAYOUT_6_1_FRONT
    {0, 1, -1, 6, -1, -1, 4, 5, -1, 2, 3},

    // CHANNEL_LAYOUT_7_0_FRONT
    {0, 1, 2, -1, -1, -1, 5, 6, -1, 3, 4},

    // CHANNEL_LAYOUT_7_1_WIDE_BACK
    {0, 1, 2, 3, 4, 5, 6, 7, -1, -1, -1},

    // CHANNEL_LAYOUT_OCTAGONAL
    {0, 1, 2, -1, 5, 6, -1, -1, 7, 3, 4},

    // CHANNEL_LAYOUT_DISCRETE
    {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC
    {0, 1, 2, -1, -1, -1, -1, -1, -1, -1, -1},

    // CHANNEL_LAYOUT_4_1_QUAD_SIDE
    {0, 1, -1, 4, -1, -1, -1, -1, -1, 2, 3},

    // CHANNEL_LAYOUT_BITSTREAM
    {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1},

    // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR
};
// Returns the channel count for |layout| from the kLayoutToChannels table;
// 0 for layouts without a fixed count (NONE, UNSUPPORTED, DISCRETE,
// BITSTREAM).
int ChannelLayoutToChannelCount(ChannelLayout layout) {
  RTC_DCHECK_LT(static_cast<size_t>(layout), arraysize(kLayoutToChannels));
  RTC_DCHECK_LE(kLayoutToChannels[layout], kMaxConcurrentChannels);
  return kLayoutToChannels[layout];
}
// Converts a channel count into a channel layout. Counts outside 1..8 are
// logged (debug builds only) and mapped to CHANNEL_LAYOUT_UNSUPPORTED.
ChannelLayout GuessChannelLayout(int channels) {
  // Best-guess layout per supported channel count, indexed by |channels| - 1.
  static constexpr ChannelLayout kGuesses[] = {
      CHANNEL_LAYOUT_MONO, CHANNEL_LAYOUT_STEREO, CHANNEL_LAYOUT_SURROUND,
      CHANNEL_LAYOUT_QUAD, CHANNEL_LAYOUT_5_0,    CHANNEL_LAYOUT_5_1,
      CHANNEL_LAYOUT_6_1,  CHANNEL_LAYOUT_7_1};
  if (channels >= 1 && channels <= 8) {
    return kGuesses[channels - 1];
  }
  RTC_DLOG(LS_WARNING) << "Unsupported channel count: " << channels;
  return CHANNEL_LAYOUT_UNSUPPORTED;
}
// Returns the interleaved stream position of |channel| within |layout|
// per the kChannelOrderings table, or -1 if the channel is unused.
int ChannelOrder(ChannelLayout layout, Channels channel) {
  RTC_DCHECK_LT(static_cast<size_t>(layout), arraysize(kChannelOrderings));
  RTC_DCHECK_LT(static_cast<size_t>(channel), arraysize(kChannelOrderings[0]));
  return kChannelOrderings[layout][channel];
}
// Returns a human-readable name for |layout| (used for logging); an empty
// string, after hitting RTC_NOTREACHED, for values outside the enum.
const char* ChannelLayoutToString(ChannelLayout layout) {
  switch (layout) {
    case CHANNEL_LAYOUT_NONE:
      return "NONE";
    case CHANNEL_LAYOUT_UNSUPPORTED:
      return "UNSUPPORTED";
    case CHANNEL_LAYOUT_MONO:
      return "MONO";
    case CHANNEL_LAYOUT_STEREO:
      return "STEREO";
    case CHANNEL_LAYOUT_2_1:
      return "2.1";
    case CHANNEL_LAYOUT_SURROUND:
      return "SURROUND";
    case CHANNEL_LAYOUT_4_0:
      return "4.0";
    case CHANNEL_LAYOUT_2_2:
      return "QUAD_SIDE";
    case CHANNEL_LAYOUT_QUAD:
      return "QUAD";
    case CHANNEL_LAYOUT_5_0:
      return "5.0";
    case CHANNEL_LAYOUT_5_1:
      return "5.1";
    case CHANNEL_LAYOUT_5_0_BACK:
      return "5.0_BACK";
    case CHANNEL_LAYOUT_5_1_BACK:
      return "5.1_BACK";
    case CHANNEL_LAYOUT_7_0:
      return "7.0";
    case CHANNEL_LAYOUT_7_1:
      return "7.1";
    case CHANNEL_LAYOUT_7_1_WIDE:
      return "7.1_WIDE";
    case CHANNEL_LAYOUT_STEREO_DOWNMIX:
      return "STEREO_DOWNMIX";
    case CHANNEL_LAYOUT_2POINT1:
      return "2POINT1";
    case CHANNEL_LAYOUT_3_1:
      return "3.1";
    case CHANNEL_LAYOUT_4_1:
      return "4.1";
    case CHANNEL_LAYOUT_6_0:
      return "6.0";
    case CHANNEL_LAYOUT_6_0_FRONT:
      return "6.0_FRONT";
    case CHANNEL_LAYOUT_HEXAGONAL:
      return "HEXAGONAL";
    case CHANNEL_LAYOUT_6_1:
      return "6.1";
    case CHANNEL_LAYOUT_6_1_BACK:
      return "6.1_BACK";
    case CHANNEL_LAYOUT_6_1_FRONT:
      return "6.1_FRONT";
    case CHANNEL_LAYOUT_7_0_FRONT:
      return "7.0_FRONT";
    case CHANNEL_LAYOUT_7_1_WIDE_BACK:
      return "7.1_WIDE_BACK";
    case CHANNEL_LAYOUT_OCTAGONAL:
      return "OCTAGONAL";
    case CHANNEL_LAYOUT_DISCRETE:
      return "DISCRETE";
    case CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC:
      return "STEREO_AND_KEYBOARD_MIC";
    case CHANNEL_LAYOUT_4_1_QUAD_SIDE:
      return "4.1_QUAD_SIDE";
    case CHANNEL_LAYOUT_BITSTREAM:
      return "BITSTREAM";
  }
  RTC_NOTREACHED() << "Invalid channel layout provided: " << layout;
  return "";
}
} // namespace webrtc

View File

@ -0,0 +1,165 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_CHANNEL_LAYOUT_H_
#define API_AUDIO_CHANNEL_LAYOUT_H_
namespace webrtc {
// This file is derived from Chromium's base/channel_layout.h.

// Enumerates the various representations of the ordering of audio channels.
// Logged to UMA, so never reuse a value, always add new/greater ones!
enum ChannelLayout {
  CHANNEL_LAYOUT_NONE = 0,
  CHANNEL_LAYOUT_UNSUPPORTED = 1,

  // Front C
  CHANNEL_LAYOUT_MONO = 2,

  // Front L, Front R
  CHANNEL_LAYOUT_STEREO = 3,

  // Front L, Front R, Back C
  CHANNEL_LAYOUT_2_1 = 4,

  // Front L, Front R, Front C
  CHANNEL_LAYOUT_SURROUND = 5,

  // Front L, Front R, Front C, Back C
  CHANNEL_LAYOUT_4_0 = 6,

  // Front L, Front R, Side L, Side R
  CHANNEL_LAYOUT_2_2 = 7,

  // Front L, Front R, Back L, Back R
  CHANNEL_LAYOUT_QUAD = 8,

  // Front L, Front R, Front C, Side L, Side R
  CHANNEL_LAYOUT_5_0 = 9,

  // Front L, Front R, Front C, LFE, Side L, Side R
  CHANNEL_LAYOUT_5_1 = 10,

  // Front L, Front R, Front C, Back L, Back R
  CHANNEL_LAYOUT_5_0_BACK = 11,

  // Front L, Front R, Front C, LFE, Back L, Back R
  CHANNEL_LAYOUT_5_1_BACK = 12,

  // Front L, Front R, Front C, Side L, Side R, Back L, Back R
  CHANNEL_LAYOUT_7_0 = 13,

  // Front L, Front R, Front C, LFE, Side L, Side R, Back L, Back R
  CHANNEL_LAYOUT_7_1 = 14,

  // Front L, Front R, Front C, LFE, Side L, Side R, Front LofC, Front RofC
  CHANNEL_LAYOUT_7_1_WIDE = 15,

  // Stereo L, Stereo R
  CHANNEL_LAYOUT_STEREO_DOWNMIX = 16,

  // Stereo L, Stereo R, LFE
  CHANNEL_LAYOUT_2POINT1 = 17,

  // Stereo L, Stereo R, Front C, LFE
  CHANNEL_LAYOUT_3_1 = 18,

  // Stereo L, Stereo R, Front C, Rear C, LFE
  CHANNEL_LAYOUT_4_1 = 19,

  // Stereo L, Stereo R, Front C, Side L, Side R, Back C
  CHANNEL_LAYOUT_6_0 = 20,

  // Stereo L, Stereo R, Side L, Side R, Front LofC, Front RofC
  CHANNEL_LAYOUT_6_0_FRONT = 21,

  // Stereo L, Stereo R, Front C, Rear L, Rear R, Rear C
  CHANNEL_LAYOUT_HEXAGONAL = 22,

  // Stereo L, Stereo R, Front C, LFE, Side L, Side R, Rear Center
  CHANNEL_LAYOUT_6_1 = 23,

  // Stereo L, Stereo R, Front C, LFE, Back L, Back R, Rear Center
  CHANNEL_LAYOUT_6_1_BACK = 24,

  // Stereo L, Stereo R, Side L, Side R, Front LofC, Front RofC, LFE
  CHANNEL_LAYOUT_6_1_FRONT = 25,

  // Front L, Front R, Front C, Side L, Side R, Front LofC, Front RofC
  CHANNEL_LAYOUT_7_0_FRONT = 26,

  // Front L, Front R, Front C, LFE, Back L, Back R, Front LofC, Front RofC
  CHANNEL_LAYOUT_7_1_WIDE_BACK = 27,

  // Front L, Front R, Front C, Side L, Side R, Back L, Back R, Back C.
  CHANNEL_LAYOUT_OCTAGONAL = 28,

  // Channels are not explicitly mapped to speakers.
  CHANNEL_LAYOUT_DISCRETE = 29,

  // Front L, Front R, Front C. Front C contains the keyboard mic audio. This
  // layout is only intended for input for WebRTC. The Front C channel
  // is stripped away in the WebRTC audio input pipeline and never seen outside
  // of that.
  CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC = 30,

  // Front L, Front R, Side L, Side R, LFE
  CHANNEL_LAYOUT_4_1_QUAD_SIDE = 31,

  // Actual channel layout is specified in the bitstream and the actual channel
  // count is unknown at Chromium media pipeline level (useful for audio
  // pass-through mode).
  CHANNEL_LAYOUT_BITSTREAM = 32,

  // Max value, must always equal the largest entry ever logged.
  CHANNEL_LAYOUT_MAX = CHANNEL_LAYOUT_BITSTREAM
};
// Note: Do not reorder or reassign these values; other code depends on their
// ordering to operate correctly. E.g., CoreAudio channel layout computations.
// These indices are also the column indices of kChannelOrderings.
enum Channels {
  LEFT = 0,
  RIGHT,
  CENTER,
  LFE,
  BACK_LEFT,
  BACK_RIGHT,
  LEFT_OF_CENTER,
  RIGHT_OF_CENTER,
  BACK_CENTER,
  SIDE_LEFT,
  SIDE_RIGHT,
  CHANNELS_MAX =
      SIDE_RIGHT,  // Must always equal the largest value ever logged.
};

// The maximum number of concurrently active channels for all possible layouts.
// ChannelLayoutToChannelCount() will never return a value higher than this.
constexpr int kMaxConcurrentChannels = 8;

// Returns the expected channel position in an interleaved stream. Values of -1
// mean the channel at that index is not used for that layout. Values range
// from 0 to ChannelLayoutToChannelCount(layout) - 1.
int ChannelOrder(ChannelLayout layout, Channels channel);

// Returns the number of channels in a given ChannelLayout.
int ChannelLayoutToChannelCount(ChannelLayout layout);

// Given the number of channels, return the best layout,
// or return CHANNEL_LAYOUT_UNSUPPORTED if there is no good match.
ChannelLayout GuessChannelLayout(int channels);

// Returns a string representation of the channel layout.
const char* ChannelLayoutToString(ChannelLayout layout);
} // namespace webrtc
#endif // API_AUDIO_CHANNEL_LAYOUT_H_

View File

@ -0,0 +1,270 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/audio/echo_canceller3_config.h"
#include <algorithm>
#include <cmath>
#include "rtc_base/checks.h"
#include "rtc_base/numerics/safe_minmax.h"
namespace webrtc {
namespace {

// Clamps |*value| into [min, max], mapping non-finite results to |min|.
// Returns true iff the stored value was already within bounds.
bool Limit(float* value, float min, float max) {
  const float original = *value;
  float clamped = rtc::SafeClamp(original, min, max);
  if (!std::isfinite(clamped)) {
    clamped = min;
  }
  *value = clamped;
  return original == clamped;
}

// Clamps |*value| into [min, max]; returns true iff no change was needed.
bool Limit(size_t* value, size_t min, size_t max) {
  const size_t original = *value;
  *value = rtc::SafeClamp(original, min, max);
  return original == *value;
}

// Clamps |*value| into [min, max]; returns true iff no change was needed.
bool Limit(int* value, int min, int max) {
  const int original = *value;
  *value = rtc::SafeClamp(original, min, max);
  return original == *value;
}

// Raises |*value| to at least |min|; returns true iff no change was needed.
bool FloorLimit(size_t* value, size_t min) {
  const size_t original = *value;
  *value = std::max(original, min);
  return original == *value;
}

}  // namespace
// The special member functions below are explicitly defaulted out-of-line so
// their definitions are anchored in this translation unit rather than being
// generated inline at every use site of the header.
EchoCanceller3Config::EchoCanceller3Config() = default;
EchoCanceller3Config::EchoCanceller3Config(const EchoCanceller3Config& e) =
    default;
EchoCanceller3Config& EchoCanceller3Config::operator=(
    const EchoCanceller3Config& e) = default;
EchoCanceller3Config::Delay::Delay() = default;
EchoCanceller3Config::Delay::Delay(const EchoCanceller3Config::Delay& e) =
    default;
EchoCanceller3Config::Delay& EchoCanceller3Config::Delay::operator=(
    const Delay& e) = default;
EchoCanceller3Config::EchoModel::EchoModel() = default;
EchoCanceller3Config::EchoModel::EchoModel(
    const EchoCanceller3Config::EchoModel& e) = default;
EchoCanceller3Config::EchoModel& EchoCanceller3Config::EchoModel::operator=(
    const EchoModel& e) = default;
EchoCanceller3Config::Suppressor::Suppressor() = default;
EchoCanceller3Config::Suppressor::Suppressor(
    const EchoCanceller3Config::Suppressor& e) = default;
EchoCanceller3Config::Suppressor& EchoCanceller3Config::Suppressor::operator=(
    const Suppressor& e) = default;
EchoCanceller3Config::Suppressor::MaskingThresholds::MaskingThresholds(
    float enr_transparent,
    float enr_suppress,
    float emr_transparent)
    : enr_transparent(enr_transparent),
      enr_suppress(enr_suppress),
      emr_transparent(emr_transparent) {}
EchoCanceller3Config::Suppressor::MaskingThresholds::MaskingThresholds(
    const EchoCanceller3Config::Suppressor::MaskingThresholds& e) = default;
EchoCanceller3Config::Suppressor::MaskingThresholds&
EchoCanceller3Config::Suppressor::MaskingThresholds::operator=(
    const MaskingThresholds& e) = default;
EchoCanceller3Config::Suppressor::Tuning::Tuning(MaskingThresholds mask_lf,
                                                 MaskingThresholds mask_hf,
                                                 float max_inc_factor,
                                                 float max_dec_factor_lf)
    : mask_lf(mask_lf),
      mask_hf(mask_hf),
      max_inc_factor(max_inc_factor),
      max_dec_factor_lf(max_dec_factor_lf) {}
EchoCanceller3Config::Suppressor::Tuning::Tuning(
    const EchoCanceller3Config::Suppressor::Tuning& e) = default;
EchoCanceller3Config::Suppressor::Tuning&
EchoCanceller3Config::Suppressor::Tuning::operator=(const Tuning& e) = default;
// Clamps every field of |config| into its valid range in place. Returns true
// if and only if no field had to be changed.
//
// NOTE: bitwise & (not &&) is used deliberately throughout so that every
// field is validated and clamped even after an earlier field was found to
// be out of range.
bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) {
  RTC_DCHECK(config);
  EchoCanceller3Config* c = config;
  bool res = true;

  // Delay: only downsampling factors 4 and 8 are supported.
  if (c->delay.down_sampling_factor != 4 &&
      c->delay.down_sampling_factor != 8) {
    c->delay.down_sampling_factor = 4;
    res = false;
  }
  res = res & Limit(&c->delay.default_delay, 0, 5000);
  res = res & Limit(&c->delay.num_filters, 0, 5000);
  res = res & Limit(&c->delay.delay_headroom_samples, 0, 5000);
  res = res & Limit(&c->delay.hysteresis_limit_blocks, 0, 5000);
  res = res & Limit(&c->delay.fixed_capture_delay_samples, 0, 5000);
  res = res & Limit(&c->delay.delay_estimate_smoothing, 0.f, 1.f);
  res = res & Limit(&c->delay.delay_candidate_detection_threshold, 0.f, 1.f);
  res = res & Limit(&c->delay.delay_selection_thresholds.initial, 1, 250);
  res = res & Limit(&c->delay.delay_selection_thresholds.converged, 1, 250);

  // Filter: refined filter parameters.
  res = res & FloorLimit(&c->filter.refined.length_blocks, 1);
  res = res & Limit(&c->filter.refined.leakage_converged, 0.f, 1000.f);
  res = res & Limit(&c->filter.refined.leakage_diverged, 0.f, 1000.f);
  res = res & Limit(&c->filter.refined.error_floor, 0.f, 1000.f);
  res = res & Limit(&c->filter.refined.error_ceil, 0.f, 100000000.f);
  res = res & Limit(&c->filter.refined.noise_gate, 0.f, 100000000.f);

  res = res & FloorLimit(&c->filter.refined_initial.length_blocks, 1);
  res = res & Limit(&c->filter.refined_initial.leakage_converged, 0.f, 1000.f);
  res = res & Limit(&c->filter.refined_initial.leakage_diverged, 0.f, 1000.f);
  res = res & Limit(&c->filter.refined_initial.error_floor, 0.f, 1000.f);
  res = res & Limit(&c->filter.refined_initial.error_ceil, 0.f, 100000000.f);
  res = res & Limit(&c->filter.refined_initial.noise_gate, 0.f, 100000000.f);

  // The initial refined filter may not be longer than the converged one.
  if (c->filter.refined.length_blocks <
      c->filter.refined_initial.length_blocks) {
    c->filter.refined_initial.length_blocks = c->filter.refined.length_blocks;
    res = false;
  }

  // Filter: coarse filter parameters.
  res = res & FloorLimit(&c->filter.coarse.length_blocks, 1);
  res = res & Limit(&c->filter.coarse.rate, 0.f, 1.f);
  res = res & Limit(&c->filter.coarse.noise_gate, 0.f, 100000000.f);

  res = res & FloorLimit(&c->filter.coarse_initial.length_blocks, 1);
  res = res & Limit(&c->filter.coarse_initial.rate, 0.f, 1.f);
  res = res & Limit(&c->filter.coarse_initial.noise_gate, 0.f, 100000000.f);

  // The initial coarse filter may not be longer than the converged one.
  if (c->filter.coarse.length_blocks < c->filter.coarse_initial.length_blocks) {
    c->filter.coarse_initial.length_blocks = c->filter.coarse.length_blocks;
    res = false;
  }

  res = res & Limit(&c->filter.config_change_duration_blocks, 0, 100000);
  res = res & Limit(&c->filter.initial_state_seconds, 0.f, 100.f);

  // ERLE: the minimum may not exceed either maximum.
  res = res & Limit(&c->erle.min, 1.f, 100000.f);
  res = res & Limit(&c->erle.max_l, 1.f, 100000.f);
  res = res & Limit(&c->erle.max_h, 1.f, 100000.f);
  if (c->erle.min > c->erle.max_l || c->erle.min > c->erle.max_h) {
    c->erle.min = std::min(c->erle.max_l, c->erle.max_h);
    res = false;
  }
  res = res & Limit(&c->erle.num_sections, 1, c->filter.refined.length_blocks);

  // Echo path strength.
  res = res & Limit(&c->ep_strength.default_gain, 0.f, 1000000.f);
  res = res & Limit(&c->ep_strength.default_len, -1.f, 1.f);

  // Echo audibility; power limits are bounded by full-scale 16-bit squared.
  res =
      res & Limit(&c->echo_audibility.low_render_limit, 0.f, 32768.f * 32768.f);
  res = res &
        Limit(&c->echo_audibility.normal_render_limit, 0.f, 32768.f * 32768.f);
  res = res & Limit(&c->echo_audibility.floor_power, 0.f, 32768.f * 32768.f);
  res = res & Limit(&c->echo_audibility.audibility_threshold_lf, 0.f,
                    32768.f * 32768.f);
  res = res & Limit(&c->echo_audibility.audibility_threshold_mf, 0.f,
                    32768.f * 32768.f);
  res = res & Limit(&c->echo_audibility.audibility_threshold_hf, 0.f,
                    32768.f * 32768.f);

  // Render levels.
  res = res &
        Limit(&c->render_levels.active_render_limit, 0.f, 32768.f * 32768.f);
  res = res & Limit(&c->render_levels.poor_excitation_render_limit, 0.f,
                    32768.f * 32768.f);
  res = res & Limit(&c->render_levels.poor_excitation_render_limit_ds8, 0.f,
                    32768.f * 32768.f);

  // Echo model.
  res = res & Limit(&c->echo_model.noise_floor_hold, 0, 1000);
  res = res & Limit(&c->echo_model.min_noise_floor_power, 0, 2000000.f);
  res = res & Limit(&c->echo_model.stationary_gate_slope, 0, 1000000.f);
  res = res & Limit(&c->echo_model.noise_gate_power, 0, 1000000.f);
  res = res & Limit(&c->echo_model.noise_gate_slope, 0, 1000000.f);
  res = res & Limit(&c->echo_model.render_pre_window_size, 0, 100);
  res = res & Limit(&c->echo_model.render_post_window_size, 0, 100);

  // Comfort noise.
  res = res & Limit(&c->comfort_noise.noise_floor_dbfs, -200.f, 0.f);

  // Suppressor tuning.
  res = res & Limit(&c->suppressor.nearend_average_blocks, 1, 5000);

  res = res &
        Limit(&c->suppressor.normal_tuning.mask_lf.enr_transparent, 0.f, 100.f);
  res = res &
        Limit(&c->suppressor.normal_tuning.mask_lf.enr_suppress, 0.f, 100.f);
  res = res &
        Limit(&c->suppressor.normal_tuning.mask_lf.emr_transparent, 0.f, 100.f);
  res = res &
        Limit(&c->suppressor.normal_tuning.mask_hf.enr_transparent, 0.f, 100.f);
  res = res &
        Limit(&c->suppressor.normal_tuning.mask_hf.enr_suppress, 0.f, 100.f);
  res = res &
        Limit(&c->suppressor.normal_tuning.mask_hf.emr_transparent, 0.f, 100.f);
  res = res & Limit(&c->suppressor.normal_tuning.max_inc_factor, 0.f, 100.f);
  res = res & Limit(&c->suppressor.normal_tuning.max_dec_factor_lf, 0.f, 100.f);

  res = res & Limit(&c->suppressor.nearend_tuning.mask_lf.enr_transparent, 0.f,
                    100.f);
  res = res &
        Limit(&c->suppressor.nearend_tuning.mask_lf.enr_suppress, 0.f, 100.f);
  res = res & Limit(&c->suppressor.nearend_tuning.mask_lf.emr_transparent, 0.f,
                    100.f);
  res = res & Limit(&c->suppressor.nearend_tuning.mask_hf.enr_transparent, 0.f,
                    100.f);
  res = res &
        Limit(&c->suppressor.nearend_tuning.mask_hf.enr_suppress, 0.f, 100.f);
  res = res & Limit(&c->suppressor.nearend_tuning.mask_hf.emr_transparent, 0.f,
                    100.f);
  res = res & Limit(&c->suppressor.nearend_tuning.max_inc_factor, 0.f, 100.f);
  res =
      res & Limit(&c->suppressor.nearend_tuning.max_dec_factor_lf, 0.f, 100.f);

  // Dominant nearend detection.
  res = res & Limit(&c->suppressor.dominant_nearend_detection.enr_threshold,
                    0.f, 1000000.f);
  res = res & Limit(&c->suppressor.dominant_nearend_detection.snr_threshold,
                    0.f, 1000000.f);
  res = res & Limit(&c->suppressor.dominant_nearend_detection.hold_duration, 0,
                    10000);
  res = res & Limit(&c->suppressor.dominant_nearend_detection.trigger_threshold,
                    0, 10000);

  // Subband nearend detection; each subband's |high| is floored at its |low|.
  res = res &
        Limit(&c->suppressor.subband_nearend_detection.nearend_average_blocks,
              1, 1024);
  res =
      res & Limit(&c->suppressor.subband_nearend_detection.subband1.low, 0, 65);
  res = res & Limit(&c->suppressor.subband_nearend_detection.subband1.high,
                    c->suppressor.subband_nearend_detection.subband1.low, 65);
  res =
      res & Limit(&c->suppressor.subband_nearend_detection.subband2.low, 0, 65);
  res = res & Limit(&c->suppressor.subband_nearend_detection.subband2.high,
                    c->suppressor.subband_nearend_detection.subband2.low, 65);
  res = res & Limit(&c->suppressor.subband_nearend_detection.nearend_threshold,
                    0.f, 1.e24f);
  res = res & Limit(&c->suppressor.subband_nearend_detection.snr_threshold, 0.f,
                    1.e24f);

  // High-band suppression.
  res = res & Limit(&c->suppressor.high_bands_suppression.enr_threshold, 0.f,
                    1000000.f);
  res = res & Limit(&c->suppressor.high_bands_suppression.max_gain_during_echo,
                    0.f, 1.f);
  res = res & Limit(&c->suppressor.high_bands_suppression
                         .anti_howling_activation_threshold,
                    0.f, 32768.f * 32768.f);
  res = res & Limit(&c->suppressor.high_bands_suppression.anti_howling_gain,
                    0.f, 1.f);

  res = res & Limit(&c->suppressor.floor_first_increase, 0.f, 1000000.f);

  return res;
}
} // namespace webrtc

View File

@ -0,0 +1,228 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_ECHO_CANCELLER3_CONFIG_H_
#define API_AUDIO_ECHO_CANCELLER3_CONFIG_H_
#include <stddef.h> // size_t
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
// Configuration struct for EchoCanceller3 (AEC3). Each nested struct below
// groups the tuning parameters for one submodule of the echo canceller; the
// accepted value ranges are enforced by Validate() in the corresponding .cc.
struct RTC_EXPORT EchoCanceller3Config {
  // Checks and updates the config parameters to lie within (mostly) reasonable
  // ranges. Returns true if and only if the config did not need to be changed.
  static bool Validate(EchoCanceller3Config* config);

  EchoCanceller3Config();
  EchoCanceller3Config(const EchoCanceller3Config& e);
  EchoCanceller3Config& operator=(const EchoCanceller3Config& other);

  // Render buffering behavior (sizes are in blocks).
  struct Buffering {
    size_t excess_render_detection_interval_blocks = 250;
    size_t max_allowed_excess_render_blocks = 8;
  } buffering;

  // Render/capture delay estimation and alignment.
  struct Delay {
    Delay();
    Delay(const Delay& e);
    Delay& operator=(const Delay& e);
    size_t default_delay = 5;
    size_t down_sampling_factor = 4;
    size_t num_filters = 5;
    size_t delay_headroom_samples = 32;
    size_t hysteresis_limit_blocks = 1;
    size_t fixed_capture_delay_samples = 0;
    float delay_estimate_smoothing = 0.7f;
    float delay_candidate_detection_threshold = 0.2f;
    struct DelaySelectionThresholds {
      int initial;
      int converged;
    } delay_selection_thresholds = {5, 20};
    bool use_external_delay_estimator = false;
    bool log_warning_on_delay_changes = false;
    // Controls how multi-channel signals are mixed down or selected before
    // alignment/delay estimation.
    struct AlignmentMixing {
      bool downmix;
      bool adaptive_selection;
      float activity_power_threshold;
      bool prefer_first_two_channels;
    };
    AlignmentMixing render_alignment_mixing = {false, true, 10000.f, true};
    AlignmentMixing capture_alignment_mixing = {false, true, 10000.f, false};
  } delay;

  // Adaptive (linear) filter configuration. The refined filter is the
  // accurate one; the coarse filter adapts faster.
  struct Filter {
    struct RefinedConfiguration {
      size_t length_blocks;
      float leakage_converged;
      float leakage_diverged;
      float error_floor;
      float error_ceil;
      float noise_gate;
    };
    struct CoarseConfiguration {
      size_t length_blocks;
      float rate;
      float noise_gate;
    };

    RefinedConfiguration refined = {13,     0.00005f, 0.05f,
                                    0.001f, 2.f,      20075344.f};
    CoarseConfiguration coarse = {13, 0.7f, 20075344.f};

    // Settings used while the filter is still in its initial state.
    RefinedConfiguration refined_initial = {12,     0.005f, 0.5f,
                                            0.001f, 2.f,    20075344.f};
    CoarseConfiguration coarse_initial = {12, 0.9f, 20075344.f};

    size_t config_change_duration_blocks = 250;
    float initial_state_seconds = 2.5f;
    bool conservative_initial_phase = false;
    bool enable_coarse_filter_output_usage = true;
    bool use_linear_filter = true;
    bool export_linear_aec_output = false;
  } filter;

  // Echo return loss enhancement (ERLE) estimation. _l/_h refer to low/high
  // frequency bands.
  struct Erle {
    float min = 1.f;
    float max_l = 4.f;
    float max_h = 1.5f;
    bool onset_detection = true;
    size_t num_sections = 1;
    bool clamp_quality_estimate_to_zero = true;
    bool clamp_quality_estimate_to_one = true;
  } erle;

  // Echo path strength assumptions.
  struct EpStrength {
    float default_gain = 1.f;
    float default_len = 0.83f;
    bool echo_can_saturate = true;
    bool bounded_erl = false;
  } ep_strength;

  // Thresholds for judging whether residual echo is audible
  // (lf/mf/hf = low/mid/high frequencies).
  struct EchoAudibility {
    float low_render_limit = 4 * 64.f;
    float normal_render_limit = 64.f;
    float floor_power = 2 * 64.f;
    float audibility_threshold_lf = 10;
    float audibility_threshold_mf = 10;
    float audibility_threshold_hf = 10;
    bool use_stationarity_properties = false;
    bool use_stationarity_properties_at_init = false;
  } echo_audibility;

  // Render signal activity/excitation levels.
  struct RenderLevels {
    float active_render_limit = 100.f;
    float poor_excitation_render_limit = 150.f;
    float poor_excitation_render_limit_ds8 = 20.f;
    float render_power_gain_db = 0.f;
  } render_levels;

  struct EchoRemovalControl {
    bool has_clock_drift = false;
    bool linear_and_stable_echo_path = false;
  } echo_removal_control;

  // Residual echo power model parameters.
  struct EchoModel {
    EchoModel();
    EchoModel(const EchoModel& e);
    EchoModel& operator=(const EchoModel& e);
    size_t noise_floor_hold = 50;
    float min_noise_floor_power = 1638400.f;
    float stationary_gate_slope = 10.f;
    float noise_gate_power = 27509.42f;
    float noise_gate_slope = 0.3f;
    size_t render_pre_window_size = 1;
    size_t render_post_window_size = 1;
    bool model_reverb_in_nonlinear_mode = true;
  } echo_model;

  // Comfort noise generation.
  struct ComfortNoise {
    float noise_floor_dbfs = -96.03406f;
  } comfort_noise;

  // Echo suppressor configuration.
  struct Suppressor {
    Suppressor();
    Suppressor(const Suppressor& e);
    Suppressor& operator=(const Suppressor& e);

    size_t nearend_average_blocks = 4;

    // Suppression masking thresholds. NOTE(review): "enr"/"emr" appear to be
    // echo-to-nearend and echo-to-masker ratios — confirm against the
    // suppression gain code before relying on this.
    struct MaskingThresholds {
      MaskingThresholds(float enr_transparent,
                        float enr_suppress,
                        float emr_transparent);
      MaskingThresholds(const MaskingThresholds& e);
      MaskingThresholds& operator=(const MaskingThresholds& e);
      float enr_transparent;
      float enr_suppress;
      float emr_transparent;
    };

    struct Tuning {
      Tuning(MaskingThresholds mask_lf,
             MaskingThresholds mask_hf,
             float max_inc_factor,
             float max_dec_factor_lf);
      Tuning(const Tuning& e);
      Tuning& operator=(const Tuning& e);
      MaskingThresholds mask_lf;
      MaskingThresholds mask_hf;
      float max_inc_factor;
      float max_dec_factor_lf;
    };

    // Tuning used normally, ...
    Tuning normal_tuning = Tuning(MaskingThresholds(.3f, .4f, .3f),
                                  MaskingThresholds(.07f, .1f, .3f),
                                  2.0f,
                                  0.25f);
    // ... and when dominant nearend activity is detected.
    Tuning nearend_tuning = Tuning(MaskingThresholds(1.09f, 1.1f, .3f),
                                   MaskingThresholds(.1f, .3f, .3f),
                                   2.0f,
                                   0.25f);

    struct DominantNearendDetection {
      float enr_threshold = .25f;
      float enr_exit_threshold = 10.f;
      float snr_threshold = 30.f;
      int hold_duration = 50;
      int trigger_threshold = 12;
      bool use_during_initial_phase = true;
    } dominant_nearend_detection;

    struct SubbandNearendDetection {
      size_t nearend_average_blocks = 1;
      // Subband index ranges (inclusive bounds, validated to lie in [0, 65]).
      struct SubbandRegion {
        size_t low;
        size_t high;
      };
      SubbandRegion subband1 = {1, 1};
      SubbandRegion subband2 = {1, 1};
      float nearend_threshold = 1.f;
      float snr_threshold = 1.f;
    } subband_nearend_detection;

    bool use_subband_nearend_detection = false;

    struct HighBandsSuppression {
      float enr_threshold = 1.f;
      float max_gain_during_echo = 1.f;
      float anti_howling_activation_threshold = 400.f;
      float anti_howling_gain = 1.f;
    } high_bands_suppression;

    float floor_first_increase = 0.00001f;
  } suppressor;
};
} // namespace webrtc
#endif // API_AUDIO_ECHO_CANCELLER3_CONFIG_H_

View File

@ -0,0 +1,68 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_ECHO_CONTROL_H_
#define API_AUDIO_ECHO_CONTROL_H_
#include <memory>
#include "rtc_base/checks.h"
namespace webrtc {
class AudioBuffer;
// Interface for an acoustic echo cancellation (AEC) submodule.
class EchoControl {
 public:
  // Analysis (not changing) of the render signal.
  virtual void AnalyzeRender(AudioBuffer* render) = 0;

  // Analysis (not changing) of the capture signal.
  virtual void AnalyzeCapture(AudioBuffer* capture) = 0;

  // Processes the capture signal in order to remove the echo.
  virtual void ProcessCapture(AudioBuffer* capture, bool level_change) = 0;

  // As above, but also returns the linear filter output.
  virtual void ProcessCapture(AudioBuffer* capture,
                              AudioBuffer* linear_output,
                              bool level_change) = 0;

  // Metrics reported by the echo controller.
  struct Metrics {
    double echo_return_loss;
    double echo_return_loss_enhancement;
    int delay_ms;
  };

  // Collect current metrics from the echo controller.
  virtual Metrics GetMetrics() const = 0;

  // Provides an optional external estimate of the audio buffer delay.
  virtual void SetAudioBufferDelay(int delay_ms) = 0;

  // Returns whether the signal is altered.
  virtual bool ActiveProcessing() const = 0;

  virtual ~EchoControl() {}
};
// Interface for a factory that creates EchoControllers.
class EchoControlFactory {
 public:
  // Creates an EchoControl instance for the given sample rate and render/
  // capture channel counts.
  virtual std::unique_ptr<EchoControl> Create(int sample_rate_hz,
                                              int num_render_channels,
                                              int num_capture_channels) = 0;

  virtual ~EchoControlFactory() = default;
};
} // namespace webrtc
#endif // API_AUDIO_ECHO_CONTROL_H_

View File

@ -0,0 +1,170 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/audio_codecs/audio_decoder.h"
#include <assert.h>
#include <memory>
#include <utility>
#include "api/array_view.h"
#include "rtc_base/checks.h"
#include "rtc_base/sanitizer.h"
#include "rtc_base/trace_event.h"
namespace webrtc {
namespace {
// Adapter exposing a legacy one-shot payload as an EncodedAudioFrame. All
// actual work is delegated to the wrapped AudioDecoder, which must outlive
// this frame.
class OldStyleEncodedFrame final : public AudioDecoder::EncodedAudioFrame {
 public:
  OldStyleEncodedFrame(AudioDecoder* decoder, rtc::Buffer&& payload)
      : decoder_(decoder), payload_(std::move(payload)) {}

  // Duration in samples-per-channel; zero if the decoder cannot tell.
  size_t Duration() const override {
    const int duration =
        decoder_->PacketDuration(payload_.data(), payload_.size());
    if (duration < 0) {
      return 0;
    }
    return static_cast<size_t>(duration);
  }

  // Decodes the stored payload into |decoded|; empty optional on failure.
  absl::optional<DecodeResult> Decode(
      rtc::ArrayView<int16_t> decoded) const override {
    AudioDecoder::SpeechType speech_type = AudioDecoder::kSpeech;
    const int num_samples = decoder_->Decode(
        payload_.data(), payload_.size(), decoder_->SampleRateHz(),
        decoded.size() * sizeof(int16_t), decoded.data(), &speech_type);
    if (num_samples < 0) {
      return absl::nullopt;
    }
    return DecodeResult{static_cast<size_t>(num_samples), speech_type};
  }

 private:
  AudioDecoder* const decoder_;
  const rtc::Buffer payload_;
};
} // namespace
// Default: this frame is not treated as a DTX packet.
bool AudioDecoder::EncodedAudioFrame::IsDtxPacket() const {
  return false;
}
AudioDecoder::ParseResult::ParseResult() = default;
AudioDecoder::ParseResult::ParseResult(ParseResult&& b) = default;
// Primary constructor. |priority| must be non-negative; zero is the highest
// priority.
AudioDecoder::ParseResult::ParseResult(uint32_t timestamp,
                                       int priority,
                                       std::unique_ptr<EncodedAudioFrame> frame)
    : timestamp(timestamp), priority(priority), frame(std::move(frame)) {
  RTC_DCHECK_GE(priority, 0);
}
AudioDecoder::ParseResult::~ParseResult() = default;
AudioDecoder::ParseResult& AudioDecoder::ParseResult::operator=(
    ParseResult&& b) = default;
// Default implementation: wrap the entire payload in a single
// OldStyleEncodedFrame at the highest priority (0).
std::vector<AudioDecoder::ParseResult> AudioDecoder::ParsePayload(
    rtc::Buffer&& payload,
    uint32_t timestamp) {
  std::vector<ParseResult> results;
  results.emplace_back(
      timestamp, 0,
      std::make_unique<OldStyleEncodedFrame>(this, std::move(payload)));
  return results;
}
// Decodes |encoded_len| bytes from |encoded| into |decoded| after verifying
// that the decoded audio will fit in |max_decoded_bytes|. Returns the number
// of samples across all channels, or -1 on overflow. The actual decoding is
// delegated to DecodeInternal().
int AudioDecoder::Decode(const uint8_t* encoded,
                         size_t encoded_len,
                         int sample_rate_hz,
                         size_t max_decoded_bytes,
                         int16_t* decoded,
                         SpeechType* speech_type) {
  TRACE_EVENT0("webrtc", "AudioDecoder::Decode");
  rtc::MsanCheckInitialized(rtc::MakeArrayView(encoded, encoded_len));
  const int duration = PacketDuration(encoded, encoded_len);
  if (duration >= 0) {
    const size_t required_bytes =
        static_cast<size_t>(duration) * Channels() * sizeof(int16_t);
    if (required_bytes > max_decoded_bytes) {
      return -1;  // Output buffer too small for the reported duration.
    }
  }
  return DecodeInternal(encoded, encoded_len, sample_rate_hz, decoded,
                        speech_type);
}
// Same contract as Decode(), but sizes the check with
// PacketDurationRedundant() and dispatches to DecodeRedundantInternal().
int AudioDecoder::DecodeRedundant(const uint8_t* encoded,
                                  size_t encoded_len,
                                  int sample_rate_hz,
                                  size_t max_decoded_bytes,
                                  int16_t* decoded,
                                  SpeechType* speech_type) {
  TRACE_EVENT0("webrtc", "AudioDecoder::DecodeRedundant");
  rtc::MsanCheckInitialized(rtc::MakeArrayView(encoded, encoded_len));
  const int duration = PacketDurationRedundant(encoded, encoded_len);
  if (duration >= 0) {
    const size_t required_bytes =
        static_cast<size_t>(duration) * Channels() * sizeof(int16_t);
    if (required_bytes > max_decoded_bytes) {
      return -1;  // Output buffer too small for the reported duration.
    }
  }
  return DecodeRedundantInternal(encoded, encoded_len, sample_rate_hz, decoded,
                                 speech_type);
}
// Default implementation: decode a redundant payload exactly like a primary
// payload.
int AudioDecoder::DecodeRedundantInternal(const uint8_t* encoded,
                                          size_t encoded_len,
                                          int sample_rate_hz,
                                          int16_t* decoded,
                                          SpeechType* speech_type) {
  return DecodeInternal(encoded, encoded_len, sample_rate_hz, decoded,
                        speech_type);
}
// Default: packet-loss concealment via DecodePlc() is not implemented.
bool AudioDecoder::HasDecodePlc() const {
  return false;
}

// Default PLC produces no samples.
size_t AudioDecoder::DecodePlc(size_t num_frames, int16_t* decoded) {
  return 0;
}

// TODO(bugs.webrtc.org/9676): Remove default implementation.
void AudioDecoder::GeneratePlc(size_t /*requested_samples_per_channel*/,
                               rtc::BufferT<int16_t>* /*concealment_audio*/) {}

// Default: no decoder error information available.
int AudioDecoder::ErrorCode() {
  return 0;
}

// Default: duration estimation is not implemented.
int AudioDecoder::PacketDuration(const uint8_t* encoded,
                                 size_t encoded_len) const {
  return kNotImplemented;
}

int AudioDecoder::PacketDurationRedundant(const uint8_t* encoded,
                                          size_t encoded_len) const {
  return kNotImplemented;
}

// Default: assume packets carry no forward error correction.
bool AudioDecoder::PacketHasFec(const uint8_t* encoded,
                                size_t encoded_len) const {
  return false;
}
// Maps the |type| value reported by a codec's decode function to the
// SpeechType enum: 0 and 1 both denote speech, 2 denotes comfort noise.
// TODO(hlundin): Both iSAC and Opus return 0 for speech.
AudioDecoder::SpeechType AudioDecoder::ConvertSpeechType(int16_t type) {
  if (type == 0 || type == 1) {
    return kSpeech;
  }
  if (type == 2) {
    return kComfortNoise;
  }
  assert(false);  // Unexpected value from the codec.
  return kSpeech;
}
} // namespace webrtc

View File

@ -0,0 +1,193 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_CODECS_AUDIO_DECODER_H_
#define API_AUDIO_CODECS_AUDIO_DECODER_H_
#include <stddef.h>
#include <stdint.h>
#include <memory>
#include <vector>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "rtc_base/buffer.h"
#include "rtc_base/constructor_magic.h"
namespace webrtc {
// Base class for audio decoders. A decoder either implements ParsePayload()
// (frame-based interface) or the legacy Decode()/DecodeInternal() path.
class AudioDecoder {
 public:
  enum SpeechType {
    kSpeech = 1,
    kComfortNoise = 2,
  };

  // Used by PacketDuration below. Save the value -1 for errors.
  enum { kNotImplemented = -2 };

  AudioDecoder() = default;
  virtual ~AudioDecoder() = default;

  // A decodable unit produced by ParsePayload().
  class EncodedAudioFrame {
   public:
    struct DecodeResult {
      size_t num_decoded_samples;
      SpeechType speech_type;
    };

    virtual ~EncodedAudioFrame() = default;

    // Returns the duration in samples-per-channel of this audio frame.
    // If no duration can be ascertained, returns zero.
    virtual size_t Duration() const = 0;

    // Returns true if this packet contains DTX.
    virtual bool IsDtxPacket() const;

    // Decodes this frame of audio and writes the result in |decoded|.
    // |decoded| must be large enough to store as many samples as indicated by
    // a call to Duration(). On success, returns an absl::optional containing
    // the total number of samples across all channels, as well as whether the
    // decoder produced comfort noise or speech. On failure, returns an empty
    // absl::optional. Decode may be called at most once per frame object.
    virtual absl::optional<DecodeResult> Decode(
        rtc::ArrayView<int16_t> decoded) const = 0;
  };

  struct ParseResult {
    ParseResult();
    ParseResult(uint32_t timestamp,
                int priority,
                std::unique_ptr<EncodedAudioFrame> frame);
    ParseResult(ParseResult&& b);
    ~ParseResult();

    ParseResult& operator=(ParseResult&& b);

    // The timestamp of the frame is in samples per channel.
    uint32_t timestamp;
    // The relative priority of the frame compared to other frames of the same
    // payload and the same timeframe. A higher value means a lower priority.
    // The highest priority is zero - negative values are not allowed.
    int priority;
    std::unique_ptr<EncodedAudioFrame> frame;
  };

  // Let the decoder parse this payload and prepare zero or more decodable
  // frames. Each frame must be between 10 ms and 120 ms long. The caller must
  // ensure that the AudioDecoder object outlives any frame objects returned by
  // this call. The decoder is free to swap or move the data from the |payload|
  // buffer. |timestamp| is the input timestamp, in samples, corresponding to
  // the start of the payload.
  virtual std::vector<ParseResult> ParsePayload(rtc::Buffer&& payload,
                                                uint32_t timestamp);

  // TODO(bugs.webrtc.org/10098): The Decode and DecodeRedundant methods are
  // obsolete; callers should call ParsePayload instead. For now, subclasses
  // must still implement DecodeInternal.

  // Decodes |encoded_len| bytes from |encoded| and writes the result in
  // |decoded|. The maximum bytes allowed to be written into |decoded| is
  // |max_decoded_bytes|. Returns the total number of samples across all
  // channels. If the decoder produced comfort noise, |speech_type|
  // is set to kComfortNoise, otherwise it is kSpeech. The desired output
  // sample rate is provided in |sample_rate_hz|, which must be valid for the
  // codec at hand.
  int Decode(const uint8_t* encoded,
             size_t encoded_len,
             int sample_rate_hz,
             size_t max_decoded_bytes,
             int16_t* decoded,
             SpeechType* speech_type);

  // Same as Decode(), but interfaces to the decoders redundant decode
  // function. The default implementation simply calls the regular Decode()
  // method.
  int DecodeRedundant(const uint8_t* encoded,
                      size_t encoded_len,
                      int sample_rate_hz,
                      size_t max_decoded_bytes,
                      int16_t* decoded,
                      SpeechType* speech_type);

  // Indicates if the decoder implements the DecodePlc method.
  virtual bool HasDecodePlc() const;

  // Calls the packet-loss concealment of the decoder to update the state after
  // one or several lost packets. The caller has to make sure that the
  // memory allocated in |decoded| should accommodate |num_frames| frames.
  virtual size_t DecodePlc(size_t num_frames, int16_t* decoded);

  // Asks the decoder to generate packet-loss concealment and append it to the
  // end of |concealment_audio|. The concealment audio should be in
  // channel-interleaved format, with as many channels as the last decoded
  // packet produced. The implementation must produce at least
  // requested_samples_per_channel, or nothing at all. This is a signal to the
  // caller to conceal the loss with other means. If the implementation
  // provides concealment samples, it is also responsible for "stitching" it
  // together with the decoded audio on either side of the concealment.
  // Note: The default implementation of GeneratePlc will be deleted soon. All
  // implementations must provide their own, which can be as simple as a no-op.
  // TODO(bugs.webrtc.org/9676): Remove default implementation.
  virtual void GeneratePlc(size_t requested_samples_per_channel,
                           rtc::BufferT<int16_t>* concealment_audio);

  // Resets the decoder state (empty buffers etc.).
  virtual void Reset() = 0;

  // Returns the last error code from the decoder.
  virtual int ErrorCode();

  // Returns the duration in samples-per-channel of the payload in |encoded|
  // which is |encoded_len| bytes long. Returns kNotImplemented if no duration
  // estimate is available, or -1 in case of an error.
  virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len) const;

  // Returns the duration in samples-per-channel of the redundant payload in
  // |encoded| which is |encoded_len| bytes long. Returns kNotImplemented if no
  // duration estimate is available, or -1 in case of an error.
  virtual int PacketDurationRedundant(const uint8_t* encoded,
                                      size_t encoded_len) const;

  // Detects whether a packet has forward error correction. The packet is
  // comprised of the samples in |encoded| which is |encoded_len| bytes long.
  // Returns true if the packet has FEC and false otherwise.
  virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const;

  // Returns the actual sample rate of the decoder's output. This value may
  // not change during the lifetime of the decoder.
  virtual int SampleRateHz() const = 0;

  // The number of channels in the decoder's output. This value may not change
  // during the lifetime of the decoder.
  virtual size_t Channels() const = 0;

 protected:
  static SpeechType ConvertSpeechType(int16_t type);

  virtual int DecodeInternal(const uint8_t* encoded,
                             size_t encoded_len,
                             int sample_rate_hz,
                             int16_t* decoded,
                             SpeechType* speech_type) = 0;

  virtual int DecodeRedundantInternal(const uint8_t* encoded,
                                      size_t encoded_len,
                                      int sample_rate_hz,
                                      int16_t* decoded,
                                      SpeechType* speech_type);

 private:
  RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoder);
};
} // namespace webrtc
#endif // API_AUDIO_CODECS_AUDIO_DECODER_H_

View File

@ -0,0 +1,113 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/audio_codecs/audio_encoder.h"
#include "rtc_base/checks.h"
#include "rtc_base/trace_event.h"
namespace webrtc {
// Defaulted special members for ANAStats and EncodedInfo, defined out of line.
ANAStats::ANAStats() = default;
ANAStats::~ANAStats() = default;
ANAStats::ANAStats(const ANAStats&) = default;

AudioEncoder::EncodedInfo::EncodedInfo() = default;
AudioEncoder::EncodedInfo::EncodedInfo(const EncodedInfo&) = default;
AudioEncoder::EncodedInfo::EncodedInfo(EncodedInfo&&) = default;
AudioEncoder::EncodedInfo::~EncodedInfo() = default;
AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=(
    const EncodedInfo&) = default;
AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=(EncodedInfo&&) =
    default;
// Default: the RTP timestamp clock runs at the audio sample rate.
int AudioEncoder::RtpTimestampRateHz() const {
  return SampleRateHz();
}
// Encodes one 10 ms block of interleaved audio, appending the output to
// |encoded|. Checks the pre-/postconditions around EncodeImpl(), which does
// the actual work.
AudioEncoder::EncodedInfo AudioEncoder::Encode(
    uint32_t rtp_timestamp,
    rtc::ArrayView<const int16_t> audio,
    rtc::Buffer* encoded) {
  TRACE_EVENT0("webrtc", "AudioEncoder::Encode");
  // Exactly one 10 ms frame of samples must be supplied.
  const size_t samples_per_10ms =
      static_cast<size_t>(NumChannels() * SampleRateHz() / 100);
  RTC_CHECK_EQ(audio.size(), samples_per_10ms);

  const size_t bytes_before = encoded->size();
  EncodedInfo info = EncodeImpl(rtp_timestamp, audio, encoded);
  // EncodeImpl() must report exactly the number of bytes it appended.
  RTC_CHECK_EQ(encoded->size() - bytes_before, info.encoded_bytes);
  return info;
}
// Codec-internal FEC: the default implementation supports only "disabled",
// so it reports success exactly when asked to disable.
bool AudioEncoder::SetFec(bool enable) {
  return !enable;
}

// Codec-internal DTX: same convention as SetFec().
bool AudioEncoder::SetDtx(bool enable) {
  return !enable;
}

bool AudioEncoder::GetDtx() const {
  return false;
}

// Default: changing the application mode is not supported.
bool AudioEncoder::SetApplication(Application application) {
  return false;
}

void AudioEncoder::SetMaxPlaybackRate(int frequency_hz) {}

void AudioEncoder::SetTargetBitrate(int target_bps) {}

// Default: this encoder contains no sub-encoders to hand over.
rtc::ArrayView<std::unique_ptr<AudioEncoder>>
AudioEncoder::ReclaimContainedEncoders() {
  return nullptr;
}

bool AudioEncoder::EnableAudioNetworkAdaptor(const std::string& config_string,
                                             RtcEventLog* event_log) {
  return false;
}

void AudioEncoder::DisableAudioNetworkAdaptor() {}

void AudioEncoder::OnReceivedUplinkPacketLossFraction(
    float uplink_packet_loss_fraction) {}

// Deprecated path; no implementation is expected to reach this.
void AudioEncoder::OnReceivedUplinkRecoverablePacketLossFraction(
    float uplink_recoverable_packet_loss_fraction) {
  RTC_NOTREACHED();
}

// Forwards to OnReceivedUplinkBandwidth() without a probing interval.
void AudioEncoder::OnReceivedTargetAudioBitrate(int target_audio_bitrate_bps) {
  OnReceivedUplinkBandwidth(target_audio_bitrate_bps, absl::nullopt);
}

void AudioEncoder::OnReceivedUplinkBandwidth(
    int target_audio_bitrate_bps,
    absl::optional<int64_t> bwe_period_ms) {}

// Translates a bitrate allocation update into the simpler bandwidth callback.
void AudioEncoder::OnReceivedUplinkAllocation(BitrateAllocationUpdate update) {
  OnReceivedUplinkBandwidth(update.target_bitrate.bps(),
                            update.bwe_period.ms());
}

void AudioEncoder::OnReceivedRtt(int rtt_ms) {}

void AudioEncoder::OnReceivedOverhead(size_t overhead_bytes_per_packet) {}

void AudioEncoder::SetReceiverFrameLengthRange(int min_frame_length_ms,
                                               int max_frame_length_ms) {}

// Default: no audio network adaptation statistics are available.
ANAStats AudioEncoder::GetANAStats() const {
  return ANAStats();
}
} // namespace webrtc

View File

@ -0,0 +1,257 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_CODECS_AUDIO_ENCODER_H_
#define API_AUDIO_CODECS_AUDIO_ENCODER_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/call/bitrate_allocation.h"
#include "api/units/time_delta.h"
#include "rtc_base/buffer.h"
#include "rtc_base/deprecation.h"
namespace webrtc {
class RtcEventLog;
// Statistics related to Audio Network Adaptation. Each counter is unset
// (absl::nullopt) when the corresponding controller is disabled.
struct ANAStats {
  ANAStats();
  ANAStats(const ANAStats&);
  ~ANAStats();
  // Number of actions taken by the ANA bitrate controller since the start of
  // the call. If this value is not set, it indicates that the bitrate
  // controller is disabled.
  absl::optional<uint32_t> bitrate_action_counter;
  // Number of actions taken by the ANA channel controller since the start of
  // the call. If this value is not set, it indicates that the channel
  // controller is disabled.
  absl::optional<uint32_t> channel_action_counter;
  // Number of actions taken by the ANA DTX controller since the start of the
  // call. If this value is not set, it indicates that the DTX controller is
  // disabled.
  absl::optional<uint32_t> dtx_action_counter;
  // Number of actions taken by the ANA FEC controller since the start of the
  // call. If this value is not set, it indicates that the FEC controller is
  // disabled.
  absl::optional<uint32_t> fec_action_counter;
  // Number of times the ANA frame length controller decided to increase the
  // frame length since the start of the call. If this value is not set, it
  // indicates that the frame length controller is disabled.
  absl::optional<uint32_t> frame_length_increase_counter;
  // Number of times the ANA frame length controller decided to decrease the
  // frame length since the start of the call. If this value is not set, it
  // indicates that the frame length controller is disabled.
  absl::optional<uint32_t> frame_length_decrease_counter;
  // The uplink packet loss fractions as set by the ANA FEC controller. If this
  // value is not set, it indicates that the ANA FEC controller is not active.
  absl::optional<float> uplink_packet_loss_fraction;
};
// This is the interface class for encoders in AudioCoding module. Each codec
// type must have an implementation of this class.
class AudioEncoder {
public:
// Used for UMA logging of codec usage. The same codecs, with the
// same values, must be listed in
// src/tools/metrics/histograms/histograms.xml in chromium to log
// correct values.
enum class CodecType {
kOther = 0, // Codec not specified, and/or not listed in this enum
kOpus = 1,
kIsac = 2,
kPcmA = 3,
kPcmU = 4,
kG722 = 5,
kIlbc = 6,
// Number of histogram bins in the UMA logging of codec types. The
// total number of different codecs that are logged cannot exceed this
// number.
kMaxLoggedAudioCodecTypes
};
struct EncodedInfoLeaf {
size_t encoded_bytes = 0;
uint32_t encoded_timestamp = 0;
int payload_type = 0;
bool send_even_if_empty = false;
bool speech = true;
CodecType encoder_type = CodecType::kOther;
};
// This is the main struct for auxiliary encoding information. Each encoded
// packet should be accompanied by one EncodedInfo struct, containing the
// total number of |encoded_bytes|, the |encoded_timestamp| and the
// |payload_type|. If the packet contains redundant encodings, the |redundant|
// vector will be populated with EncodedInfoLeaf structs. Each struct in the
// vector represents one encoding; the order of structs in the vector is the
// same as the order in which the actual payloads are written to the byte
// stream. When EncoderInfoLeaf structs are present in the vector, the main
// struct's |encoded_bytes| will be the sum of all the |encoded_bytes| in the
// vector.
struct EncodedInfo : public EncodedInfoLeaf {
EncodedInfo();
EncodedInfo(const EncodedInfo&);
EncodedInfo(EncodedInfo&&);
~EncodedInfo();
EncodedInfo& operator=(const EncodedInfo&);
EncodedInfo& operator=(EncodedInfo&&);
std::vector<EncodedInfoLeaf> redundant;
};
virtual ~AudioEncoder() = default;
// Returns the input sample rate in Hz and the number of input channels.
// These are constants set at instantiation time.
virtual int SampleRateHz() const = 0;
virtual size_t NumChannels() const = 0;
// Returns the rate at which the RTP timestamps are updated. The default
// implementation returns SampleRateHz().
virtual int RtpTimestampRateHz() const;
// Returns the number of 10 ms frames the encoder will put in the next
// packet. This value may only change when Encode() outputs a packet; i.e.,
// the encoder may vary the number of 10 ms frames from packet to packet, but
// it must decide the length of the next packet no later than when outputting
// the preceding packet.
virtual size_t Num10MsFramesInNextPacket() const = 0;
// Returns the maximum value that can be returned by
// Num10MsFramesInNextPacket().
virtual size_t Max10MsFramesInAPacket() const = 0;
// Returns the current target bitrate in bits/s. The value -1 means that the
// codec adapts the target automatically, and a current target cannot be
// provided.
virtual int GetTargetBitrate() const = 0;
// Accepts one 10 ms block of input audio (i.e., SampleRateHz() / 100 *
// NumChannels() samples). Multi-channel audio must be sample-interleaved.
// The encoder appends zero or more bytes of output to |encoded| and returns
// additional encoding information. Encode() checks some preconditions, calls
// EncodeImpl() which does the actual work, and then checks some
// postconditions.
EncodedInfo Encode(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded);
// Resets the encoder to its starting state, discarding any input that has
// been fed to the encoder but not yet emitted in a packet.
virtual void Reset() = 0;
// Enables or disables codec-internal FEC (forward error correction). Returns
// true if the codec was able to comply. The default implementation returns
// true when asked to disable FEC and false when asked to enable it (meaning
// that FEC isn't supported).
virtual bool SetFec(bool enable);
// Enables or disables codec-internal VAD/DTX. Returns true if the codec was
// able to comply. The default implementation returns true when asked to
// disable DTX and false when asked to enable it (meaning that DTX isn't
// supported).
virtual bool SetDtx(bool enable);
// Returns the status of codec-internal DTX. The default implementation always
// returns false.
virtual bool GetDtx() const;
// Sets the application mode. Returns true if the codec was able to comply.
// The default implementation just returns false.
enum class Application { kSpeech, kAudio };
virtual bool SetApplication(Application application);
// Tells the encoder about the highest sample rate the decoder is expected to
// use when decoding the bitstream. The encoder would typically use this
// information to adjust the quality of the encoding. The default
// implementation does nothing.
virtual void SetMaxPlaybackRate(int frequency_hz);
// This is to be deprecated. Please use |OnReceivedTargetAudioBitrate|
// instead.
// Tells the encoder what average bitrate we'd like it to produce. The
// encoder is free to adjust or disregard the given bitrate (the default
// implementation does the latter).
RTC_DEPRECATED virtual void SetTargetBitrate(int target_bps);
// Causes this encoder to let go of any other encoders it contains, and
// returns a pointer to an array where they are stored (which is required to
// live as long as this encoder). Unless the returned array is empty, you may
// not call any methods on this encoder afterwards, except for the
// destructor. The default implementation just returns an empty array.
// NOTE: This method is subject to change. Do not call or override it.
virtual rtc::ArrayView<std::unique_ptr<AudioEncoder>>
ReclaimContainedEncoders();
// Enables audio network adaptor. Returns true if successful.
virtual bool EnableAudioNetworkAdaptor(const std::string& config_string,
RtcEventLog* event_log);
// Disables audio network adaptor.
virtual void DisableAudioNetworkAdaptor();
// Provides uplink packet loss fraction to this encoder to allow it to adapt.
// |uplink_packet_loss_fraction| is in the range [0.0, 1.0].
virtual void OnReceivedUplinkPacketLossFraction(
float uplink_packet_loss_fraction);
RTC_DEPRECATED virtual void OnReceivedUplinkRecoverablePacketLossFraction(
float uplink_recoverable_packet_loss_fraction);
// Provides target audio bitrate to this encoder to allow it to adapt.
virtual void OnReceivedTargetAudioBitrate(int target_bps);
// Provides target audio bitrate and corresponding probing interval of
// the bandwidth estimator to this encoder to allow it to adapt.
virtual void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps,
absl::optional<int64_t> bwe_period_ms);
// Provides target audio bitrate and corresponding probing interval of
// the bandwidth estimator to this encoder to allow it to adapt.
virtual void OnReceivedUplinkAllocation(BitrateAllocationUpdate update);
// Provides RTT to this encoder to allow it to adapt.
virtual void OnReceivedRtt(int rtt_ms);
// Provides overhead to this encoder to adapt. The overhead is the number of
// bytes that will be added to each packet the encoder generates.
virtual void OnReceivedOverhead(size_t overhead_bytes_per_packet);
// To allow encoder to adapt its frame length, it must be provided the frame
// length range that receivers can accept.
virtual void SetReceiverFrameLengthRange(int min_frame_length_ms,
int max_frame_length_ms);
// Get statistics related to audio network adaptation.
virtual ANAStats GetANAStats() const;
// The range of frame lengths that are supported, or nullopt if there's no such
// information. This is used to calculate the full bitrate range, including
// overhead.
virtual absl::optional<std::pair<TimeDelta, TimeDelta>> GetFrameLengthRange()
const = 0;
protected:
// Subclasses implement this to perform the actual encoding. Called by
// Encode().
virtual EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
rtc::ArrayView<const int16_t> audio,
rtc::Buffer* encoded) = 0;
};
} // namespace webrtc
#endif // API_AUDIO_CODECS_AUDIO_ENCODER_H_

View File

@ -0,0 +1,45 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_CALL_BITRATE_ALLOCATION_H_
#define API_CALL_BITRATE_ALLOCATION_H_
#include "api/units/data_rate.h"
#include "api/units/time_delta.h"
namespace webrtc {
// BitrateAllocationUpdate provides information to allocated streams about their
// bitrate allocation. It originates from the BitrateAllocater class and is
// propagated from there.
struct BitrateAllocationUpdate {
// The allocated target bitrate. Media streams should produce this amount of
// data. (Note that this may include packet overhead depending on
// configuration.)
DataRate target_bitrate = DataRate::Zero();
// The allocated part of the estimated link capacity. This is more stable than
// the target as it is based on the underlying link capacity estimate. This
// should be used to change encoder configuration when the cost of change is
// high.
DataRate stable_target_bitrate = DataRate::Zero();
// Predicted packet loss ratio.
// NOTE(review): assumed to be a fraction in [0.0, 1.0] — confirm against the
// producer (BitrateAllocator).
double packet_loss_ratio = 0;
// Predicted round trip time.
// Defaults to PlusInfinity(), i.e. "no measurement available yet".
TimeDelta round_trip_time = TimeDelta::PlusInfinity();
// |bwe_period| is deprecated, use |stable_target_bitrate| allocation instead.
TimeDelta bwe_period = TimeDelta::PlusInfinity();
// Congestion window pushback bitrate reduction fraction. Used in
// VideoStreamEncoder to reduce the bitrate by the given fraction
// by dropping frames.
double cwnd_reduce_ratio = 0;
};
} // namespace webrtc
#endif // API_CALL_BITRATE_ALLOCATION_H_

130
webrtc/api/function_view.h Normal file
View File

@ -0,0 +1,130 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_FUNCTION_VIEW_H_
#define API_FUNCTION_VIEW_H_
#include <type_traits>
#include <utility>
#include "rtc_base/checks.h"
// Just like std::function, FunctionView will wrap any callable and hide its
// actual type, exposing only its signature. But unlike std::function,
// FunctionView doesn't own its callable---it just points to it. Thus, it's a
// good choice mainly as a function argument when the callable argument will
// not be called again once the function has returned.
//
// Its constructors are implicit, so that callers won't have to convert lambdas
// and other callables to FunctionView<Blah(Blah, Blah)> explicitly. This is
// safe because FunctionView is only a reference to the real callable.
//
// Example use:
//
// void SomeFunction(rtc::FunctionView<int(int)> index_transform);
// ...
// SomeFunction([](int i) { return 2 * i + 1; });
//
// Note: FunctionView is tiny (essentially just two pointers) and trivially
// copyable, so it's probably cheaper to pass it by value than by const
// reference.
namespace rtc {
// Primary template is only declared; the partial specialization below for
// function signatures RetT(ArgT...) is the only usable form.
template <typename T>
class FunctionView; // Undefined.
template <typename RetT, typename... ArgT>
class FunctionView<RetT(ArgT...)> final {
public:
// Constructor for lambdas and other callables; it accepts every type of
// argument except those noted in its enable_if call.
template <
typename F,
typename std::enable_if<
// Not for function pointers; we have another constructor for that
// below.
!std::is_function<typename std::remove_pointer<
typename std::remove_reference<F>::type>::type>::value &&
// Not for nullptr; we have another constructor for that below.
!std::is_same<std::nullptr_t,
typename std::remove_cv<F>::type>::value &&
// Not for FunctionView objects; we have another constructor for that
// (the implicitly declared copy constructor).
!std::is_same<FunctionView,
typename std::remove_cv<typename std::remove_reference<
F>::type>::type>::value>::type* = nullptr>
FunctionView(F&& f)
: call_(CallVoidPtr<typename std::remove_reference<F>::type>) {
// Only the address of the caller's callable is stored; it is not copied,
// so it must outlive this FunctionView (see file comment).
f_.void_ptr = &f;
}
// Constructor that accepts function pointers. If the argument is null, the
// result is an empty FunctionView.
template <
typename F,
typename std::enable_if<std::is_function<typename std::remove_pointer<
typename std::remove_reference<F>::type>::type>::value>::type* =
nullptr>
FunctionView(F&& f)
: call_(f ? CallFunPtr<typename std::remove_pointer<F>::type> : nullptr) {
// Function pointers are stored as a generic function pointer; CallFunPtr
// casts back to the precise type before invoking.
f_.fun_ptr = reinterpret_cast<void (*)()>(f);
}
// Constructor that accepts nullptr. It creates an empty FunctionView.
template <typename F,
typename std::enable_if<std::is_same<
std::nullptr_t,
typename std::remove_cv<F>::type>::value>::type* = nullptr>
FunctionView(F&& f) : call_(nullptr) {}
// Default constructor. Creates an empty FunctionView.
FunctionView() : call_(nullptr) {}
// Invokes the wrapped callable. Must not be called on an empty (null)
// FunctionView; the DCHECK enforces this.
RetT operator()(ArgT... args) const {
RTC_DCHECK(call_);
return call_(f_, std::forward<ArgT>(args)...);
}
// Returns true if we have a function, false if we don't (i.e., we're null).
explicit operator bool() const { return !!call_; }
private:
union VoidUnion {
void* void_ptr;
void (*fun_ptr)();
};
// Dispatcher used when the wrapped callable is an object: casts the stored
// void* back to the concrete callable type F and invokes it.
template <typename F>
static RetT CallVoidPtr(VoidUnion vu, ArgT... args) {
return (*static_cast<F*>(vu.void_ptr))(std::forward<ArgT>(args)...);
}
// Dispatcher used when the wrapped callable is a plain function: casts the
// stored generic function pointer back to its real signature and calls it.
template <typename F>
static RetT CallFunPtr(VoidUnion vu, ArgT... args) {
return (reinterpret_cast<typename std::add_pointer<F>::type>(vu.fun_ptr))(
std::forward<ArgT>(args)...);
}
// A pointer to the callable thing, with type information erased. It's a
// union because we have to use separate types depending on if the callable
// thing is a function pointer or something else.
VoidUnion f_;
// Pointer to a dispatch function that knows the type of the callable thing
// that's stored in f_, and how to call it. A FunctionView object is empty
// (null) iff call_ is null.
RetT (*call_)(VoidUnion, ArgT...);
};
} // namespace rtc
#endif // API_FUNCTION_VIEW_H_

46
webrtc/api/meson.build Normal file
View File

@ -0,0 +1,46 @@
# Sources compiled into the libapi static helper library.
api_sources = [
'audio/audio_frame.cc',
'audio/channel_layout.cc',
'audio/echo_canceller3_config.cc',
'audio_codecs/audio_decoder.cc',
'audio_codecs/audio_encoder.cc',
'rtp_headers.cc',
'rtp_packet_info.cc',
'task_queue/task_queue_base.cc',
'units/data_rate.cc',
'units/data_size.cc',
'units/frequency.cc',
'units/time_delta.cc',
'units/timestamp.cc',
'video/color_space.cc',
'video/hdr_metadata.cc',
'video/video_content_type.cc',
'video/video_timing.cc',
]
# Public headers to install; each entry is [subdirectory, header name].
api_headers = [
['', 'array_view.h'],
['', 'scoped_refptr.h'],
['audio', 'echo_canceller3_config.h'],
['audio', 'echo_control.h'],
]
# Install each header under webrtc_audio_processing/api/<subdir>.
foreach h : api_headers
install_headers(
join_paths(h[0], h[1]),
subdir: join_paths('webrtc_audio_processing', 'api', h[0])
)
endforeach
# Build the convenience static library and expose it to the rest of the
# build as the api_dep dependency.
libapi = static_library('libapi',
api_sources,
dependencies: common_deps,
include_directories: webrtc_inc,
cpp_args : common_cxxflags
)
api_dep = declare_dependency(
link_with: libapi
)

View File

@ -0,0 +1,43 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_REF_COUNTED_BASE_H_
#define API_REF_COUNTED_BASE_H_
#include "rtc_base/constructor_magic.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/ref_counter.h"
namespace rtc {
// Base class providing intrusive reference counting. Lifetime is managed via
// AddRef()/Release(); the object deletes itself when the last reference is
// dropped, so instances must be heap-allocated.
class RefCountedBase {
 public:
  RefCountedBase() = default;

  // Increments the reference count.
  void AddRef() const { ref_count_.IncRef(); }

  // Decrements the reference count, destroying the object when it reaches
  // zero. Returns whether this call dropped the last reference.
  RefCountReleaseStatus Release() const {
    const RefCountReleaseStatus result = ref_count_.DecRef();
    if (result == RefCountReleaseStatus::kDroppedLastRef)
      delete this;
    return result;
  }

 protected:
  // Protected and virtual: destruction happens only through Release(), and
  // derived objects are deleted correctly via the base pointer.
  virtual ~RefCountedBase() = default;

 private:
  mutable webrtc::webrtc_impl::RefCounter ref_count_{0};

  RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedBase);
};
} // namespace rtc
#endif // API_REF_COUNTED_BASE_H_

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
* *
* Use of this source code is governed by a BSD-style license * Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source * that can be found in the LICENSE file in the root of the source
@ -8,18 +8,10 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/common_types.h" #include "api/rtp_headers.h"
#include <string.h>
namespace webrtc { namespace webrtc {
int InStream::Rewind() { return -1; }
int OutStream::Rewind() { return -1; }
StreamDataCounters::StreamDataCounters() : first_packet_time_ms(-1) {}
RTPHeaderExtension::RTPHeaderExtension() RTPHeaderExtension::RTPHeaderExtension()
: hasTransmissionTimeOffset(false), : hasTransmissionTimeOffset(false),
transmissionTimeOffset(0), transmissionTimeOffset(0),
@ -31,8 +23,16 @@ RTPHeaderExtension::RTPHeaderExtension()
voiceActivity(false), voiceActivity(false),
audioLevel(0), audioLevel(0),
hasVideoRotation(false), hasVideoRotation(false),
videoRotation(0) { videoRotation(kVideoRotation_0),
} hasVideoContentType(false),
videoContentType(VideoContentType::UNSPECIFIED),
has_video_timing(false) {}
RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) =
default;
RTPHeaderExtension& RTPHeaderExtension::operator=(
const RTPHeaderExtension& other) = default;
RTPHeader::RTPHeader() RTPHeader::RTPHeader()
: markerBit(false), : markerBit(false),
@ -41,11 +41,14 @@ RTPHeader::RTPHeader()
timestamp(0), timestamp(0),
ssrc(0), ssrc(0),
numCSRCs(0), numCSRCs(0),
arrOfCSRCs(),
paddingLength(0), paddingLength(0),
headerLength(0), headerLength(0),
payload_type_frequency(0), payload_type_frequency(0),
extension() { extension() {}
memset(&arrOfCSRCs, 0, sizeof(arrOfCSRCs));
} RTPHeader::RTPHeader(const RTPHeader& other) = default;
RTPHeader& RTPHeader::operator=(const RTPHeader& other) = default;
} // namespace webrtc } // namespace webrtc

190
webrtc/api/rtp_headers.h Normal file
View File

@ -0,0 +1,190 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_RTP_HEADERS_H_
#define API_RTP_HEADERS_H_
#include <stddef.h>
#include <stdint.h>
#include <string>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/units/timestamp.h"
#include "api/video/color_space.h"
#include "api/video/video_content_type.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
namespace webrtc {
// Describes a request for RTCP feedback as carried in the transport-wide
// congestion control header extension (see the draft linked below).
struct FeedbackRequest {
// Determines whether the recv delta as specified in
// https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01
// should be included.
bool include_timestamps;
// Include feedback of received packets in the range [sequence_number -
// sequence_count + 1, sequence_number]. That is, no feedback will be sent if
// sequence_count is zero.
int sequence_count;
};
// The Absolute Capture Time extension is used to stamp RTP packets with a NTP
// timestamp showing when the first audio or video frame in a packet was
// originally captured. The intent of this extension is to provide a way to
// accomplish audio-to-video synchronization when RTCP-terminating intermediate
// systems (e.g. mixers) are involved. See:
// http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time
struct AbsoluteCaptureTime {
// Absolute capture timestamp is the NTP timestamp of when the first frame in
// a packet was originally captured. This timestamp MUST be based on the same
// clock as the clock used to generate NTP timestamps for RTCP sender reports
// on the capture system.
//
// Its not always possible to do an NTP clock readout at the exact moment of
// when a media frame is captured. A capture system MAY postpone the readout
// until a more convenient time. A capture system SHOULD have known delays
// (e.g. from hardware buffers) subtracted from the readout to make the final
// timestamp as close to the actual capture time as possible.
//
// This field is encoded as a 64-bit unsigned fixed-point number with the high
// 32 bits for the timestamp in seconds and low 32 bits for the fractional
// part. This is also known as the UQ32.32 format and is what the RTP
// specification defines as the canonical format to represent NTP timestamps.
uint64_t absolute_capture_timestamp;
// Estimated capture clock offset is the senders estimate of the offset
// between its own NTP clock and the capture systems NTP clock. The sender is
// here defined as the system that owns the NTP clock used to generate the NTP
// timestamps for the RTCP sender reports on this stream. The sender system is
// typically either the capture system or a mixer.
//
// This field is encoded as a 64-bit twos complement signed fixed-point
// number with the high 32 bits for the seconds and low 32 bits for the
// fractional part. Its intended to make it easy for a receiver, that knows
// how to estimate the sender systems NTP clock, to also estimate the capture
// systems NTP clock:
//
// Capture NTP Clock = Sender NTP Clock + Capture Clock Offset
absl::optional<int64_t> estimated_capture_clock_offset;
};
// Two AbsoluteCaptureTime values are equal iff both the capture timestamp
// and the (optional) estimated capture clock offset match.
inline bool operator==(const AbsoluteCaptureTime& lhs,
                       const AbsoluteCaptureTime& rhs) {
  if (lhs.absolute_capture_timestamp != rhs.absolute_capture_timestamp) {
    return false;
  }
  return lhs.estimated_capture_clock_offset ==
         rhs.estimated_capture_clock_offset;
}

inline bool operator!=(const AbsoluteCaptureTime& lhs,
                       const AbsoluteCaptureTime& rhs) {
  return !(lhs == rhs);
}
// Holds the decoded values of all RTP header extensions recognized on a
// packet; each "hasX"/absl::optional flag indicates whether the matching
// extension was present.
struct RTPHeaderExtension {
RTPHeaderExtension();
RTPHeaderExtension(const RTPHeaderExtension& other);
RTPHeaderExtension& operator=(const RTPHeaderExtension& other);
// The absolute send time is a 24-bit unsigned fixed-point seconds value with
// 18 fractional bits (6.18 format); see the DCHECKs below.
static constexpr int kAbsSendTimeFraction = 18;
// Converts the 24-bit fixed-point absolute send time to a Timestamp in
// microseconds. Requires hasAbsoluteSendTime.
Timestamp GetAbsoluteSendTimestamp() const {
RTC_DCHECK(hasAbsoluteSendTime);
RTC_DCHECK(absoluteSendTime < (1ul << 24));
return Timestamp::Micros((absoluteSendTime * 1000000ll) /
(1 << kAbsSendTimeFraction));
}
// Returns the elapsed time between |previous_sendtime| and this packet's
// absolute send time, correctly handling 24-bit wrap-around.
TimeDelta GetAbsoluteSendTimeDelta(uint32_t previous_sendtime) const {
RTC_DCHECK(hasAbsoluteSendTime);
RTC_DCHECK(absoluteSendTime < (1ul << 24));
RTC_DCHECK(previous_sendtime < (1ul << 24));
// Sign-extend the 24-bit difference to 32 bits: shift it into the top 24
// bits, then arithmetic-shift back down.
int32_t delta =
static_cast<int32_t>((absoluteSendTime - previous_sendtime) << 8) >> 8;
return TimeDelta::Micros((delta * 1000000ll) / (1 << kAbsSendTimeFraction));
}
bool hasTransmissionTimeOffset;
int32_t transmissionTimeOffset;
bool hasAbsoluteSendTime;
// 24-bit 6.18 fixed-point seconds; valid only when hasAbsoluteSendTime.
uint32_t absoluteSendTime;
absl::optional<AbsoluteCaptureTime> absolute_capture_time;
bool hasTransportSequenceNumber;
uint16_t transportSequenceNumber;
absl::optional<FeedbackRequest> feedback_request;
// Audio Level includes both level in dBov and voiced/unvoiced bit. See:
// https://tools.ietf.org/html/rfc6464#section-3
bool hasAudioLevel;
bool voiceActivity;
uint8_t audioLevel;
// For Coordination of Video Orientation. See
// http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/
// ts_126114v120700p.pdf
bool hasVideoRotation;
VideoRotation videoRotation;
// TODO(ilnik): Refactor this and one above to be absl::optional() and remove
// a corresponding bool flag.
bool hasVideoContentType;
VideoContentType videoContentType;
bool has_video_timing;
VideoSendTiming video_timing;
VideoPlayoutDelay playout_delay;
// For identification of a stream when ssrc is not signaled. See
// https://tools.ietf.org/html/draft-ietf-avtext-rid-09
// TODO(danilchap): Update url from draft to release version.
std::string stream_id;
std::string repaired_stream_id;
// For identifying the media section used to interpret this RTP packet. See
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-38
std::string mid;
absl::optional<ColorSpace> color_space;
};
enum { kRtpCsrcSize = 15 }; // RFC 3550 page 13
// Parsed representation of an RTP packet header: the fixed fields from
// RFC 3550 section 5.1 plus any recognized header extensions.
struct RTPHeader {
RTPHeader();
RTPHeader(const RTPHeader& other);
RTPHeader& operator=(const RTPHeader& other);
bool markerBit;
uint8_t payloadType;
uint16_t sequenceNumber;
uint32_t timestamp;
uint32_t ssrc;
// Number of valid entries in |arrOfCSRCs|; at most kRtpCsrcSize.
uint8_t numCSRCs;
uint32_t arrOfCSRCs[kRtpCsrcSize];
// Lengths are in bytes. NOTE(review): headerLength is presumed to include
// header extensions — confirm with the parser that fills this struct.
size_t paddingLength;
size_t headerLength;
// Clock rate of |timestamp| in Hz. NOTE(review): inferred from the name;
// confirm against the code that populates it.
int payload_type_frequency;
RTPHeaderExtension extension;
};
// RTCP mode to use. Compound mode is described by RFC 4585 and reduced-size
// RTCP mode is described by RFC 5506.
enum class RtcpMode { kOff, kCompound, kReducedSize };
enum NetworkState {
kNetworkUp,
kNetworkDown,
};
} // namespace webrtc
#endif // API_RTP_HEADERS_H_

View File

@ -0,0 +1,60 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/rtp_packet_info.h"
#include <algorithm>
#include <utility>
namespace webrtc {
// Default instance: zero SSRC/timestamp, no CSRCs or extension data.
// NOTE(review): receive_time_ms_ == -1 appears to be the "not yet
// received / unset" sentinel — confirm with callers.
RtpPacketInfo::RtpPacketInfo()
: ssrc_(0), rtp_timestamp_(0), receive_time_ms_(-1) {}
// Fully-specified constructor; |csrcs| is taken by value and moved in to
// avoid a copy.
RtpPacketInfo::RtpPacketInfo(
uint32_t ssrc,
std::vector<uint32_t> csrcs,
uint32_t rtp_timestamp,
absl::optional<uint8_t> audio_level,
absl::optional<AbsoluteCaptureTime> absolute_capture_time,
int64_t receive_time_ms)
: ssrc_(ssrc),
csrcs_(std::move(csrcs)),
rtp_timestamp_(rtp_timestamp),
audio_level_(audio_level),
absolute_capture_time_(absolute_capture_time),
receive_time_ms_(receive_time_ms) {}
// Extracts the per-packet fields from an already-parsed RTP header.
RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header,
int64_t receive_time_ms)
: ssrc_(rtp_header.ssrc),
rtp_timestamp_(rtp_header.timestamp),
receive_time_ms_(receive_time_ms) {
const auto& extension = rtp_header.extension;
// Clamp the CSRC count to kRtpCsrcSize in case the header reports more
// entries than the fixed-size array can hold.
const auto csrcs_count = std::min<size_t>(rtp_header.numCSRCs, kRtpCsrcSize);
csrcs_.assign(&rtp_header.arrOfCSRCs[0], &rtp_header.arrOfCSRCs[csrcs_count]);
// The audio level is only meaningful when its extension was present; the
// absolute capture time is already optional and is copied as-is.
if (extension.hasAudioLevel) {
audio_level_ = extension.audioLevel;
}
absolute_capture_time_ = extension.absolute_capture_time;
}
// Field-wise equality over all per-packet attributes; each accessor is a
// trivial getter, so the comparison has no side effects.
bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs) {
  if (lhs.ssrc() != rhs.ssrc() || lhs.csrcs() != rhs.csrcs()) {
    return false;
  }
  if (lhs.rtp_timestamp() != rhs.rtp_timestamp()) {
    return false;
  }
  if (lhs.audio_level() != rhs.audio_level()) {
    return false;
  }
  if (lhs.absolute_capture_time() != rhs.absolute_capture_time()) {
    return false;
  }
  return lhs.receive_time_ms() == rhs.receive_time_ms();
}
} // namespace webrtc

View File

@ -0,0 +1,97 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_RTP_PACKET_INFO_H_
#define API_RTP_PACKET_INFO_H_
#include <cstdint>
#include <utility>
#include <vector>
#include "absl/types/optional.h"
#include "api/rtp_headers.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
//
// Structure to hold information about a received |RtpPacket|. It is primarily
// used to carry per-packet information from when a packet is received until
// the information is passed to |SourceTracker|.
//
class RTC_EXPORT RtpPacketInfo {
public:
// Creates an empty info: zero ssrc/timestamp, receive time -1 (unset).
RtpPacketInfo();
RtpPacketInfo(uint32_t ssrc,
std::vector<uint32_t> csrcs,
uint32_t rtp_timestamp,
absl::optional<uint8_t> audio_level,
absl::optional<AbsoluteCaptureTime> absolute_capture_time,
int64_t receive_time_ms);
// Extracts ssrc, timestamp, CSRCs (clamped to kRtpCsrcSize) and the
// audio-level / absolute-capture-time extensions from a parsed header.
RtpPacketInfo(const RTPHeader& rtp_header, int64_t receive_time_ms);
// Copyable and movable; all members have value semantics.
RtpPacketInfo(const RtpPacketInfo& other) = default;
RtpPacketInfo(RtpPacketInfo&& other) = default;
RtpPacketInfo& operator=(const RtpPacketInfo& other) = default;
RtpPacketInfo& operator=(RtpPacketInfo&& other) = default;
uint32_t ssrc() const { return ssrc_; }
void set_ssrc(uint32_t value) { ssrc_ = value; }
const std::vector<uint32_t>& csrcs() const { return csrcs_; }
void set_csrcs(std::vector<uint32_t> value) { csrcs_ = std::move(value); }
uint32_t rtp_timestamp() const { return rtp_timestamp_; }
void set_rtp_timestamp(uint32_t value) { rtp_timestamp_ = value; }
absl::optional<uint8_t> audio_level() const { return audio_level_; }
void set_audio_level(absl::optional<uint8_t> value) { audio_level_ = value; }
const absl::optional<AbsoluteCaptureTime>& absolute_capture_time() const {
return absolute_capture_time_;
}
void set_absolute_capture_time(
const absl::optional<AbsoluteCaptureTime>& value) {
absolute_capture_time_ = value;
}
int64_t receive_time_ms() const { return receive_time_ms_; }
void set_receive_time_ms(int64_t value) { receive_time_ms_ = value; }
private:
// Fields from the RTP header:
// https://tools.ietf.org/html/rfc3550#section-5.1
uint32_t ssrc_;
std::vector<uint32_t> csrcs_;
uint32_t rtp_timestamp_;
// Fields from the Audio Level header extension:
// https://tools.ietf.org/html/rfc6464#section-3
absl::optional<uint8_t> audio_level_;
// Fields from the Absolute Capture Time header extension:
// http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time
absl::optional<AbsoluteCaptureTime> absolute_capture_time_;
// Local |webrtc::Clock|-based timestamp of when the packet was received.
int64_t receive_time_ms_;
};
bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs);
inline bool operator!=(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs) {
return !(lhs == rhs);
}
} // namespace webrtc
#endif // API_RTP_PACKET_INFO_H_

View File

@ -0,0 +1,130 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_RTP_PACKET_INFOS_H_
#define API_RTP_PACKET_INFOS_H_
#include <cstdint>
#include <utility>
#include <vector>
#include "api/ref_counted_base.h"
#include "api/rtp_packet_info.h"
#include "api/scoped_refptr.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
// Semi-immutable structure to hold information about packets used to assemble
// an audio or video frame. Uses internal reference counting to make it very
// cheap to copy.
//
// We should ideally just use |std::vector<RtpPacketInfo>| and have it
// |std::move()|-ed as the per-packet information is transferred from one object
// to another. But moving the info, instead of copying it, is not easily done
// for the current video code.
class RTC_EXPORT RtpPacketInfos {
public:
using vector_type = std::vector<RtpPacketInfo>;
using value_type = vector_type::value_type;
using size_type = vector_type::size_type;
using difference_type = vector_type::difference_type;
using const_reference = vector_type::const_reference;
using const_pointer = vector_type::const_pointer;
using const_iterator = vector_type::const_iterator;
using const_reverse_iterator = vector_type::const_reverse_iterator;
// Only const access is exposed because the underlying Data block may be
// shared between copies of this object.
using reference = const_reference;
using pointer = const_pointer;
using iterator = const_iterator;
using reverse_iterator = const_reverse_iterator;
RtpPacketInfos() {}
explicit RtpPacketInfos(const vector_type& entries)
: data_(Data::Create(entries)) {}
explicit RtpPacketInfos(vector_type&& entries)
: data_(Data::Create(std::move(entries))) {}
// Copies are cheap: they only add a reference to the shared Data block.
RtpPacketInfos(const RtpPacketInfos& other) = default;
RtpPacketInfos(RtpPacketInfos&& other) = default;
RtpPacketInfos& operator=(const RtpPacketInfos& other) = default;
RtpPacketInfos& operator=(RtpPacketInfos&& other) = default;
// Read-only std::vector-like interface, all forwarding to entries().
const_reference operator[](size_type pos) const { return entries()[pos]; }
const_reference at(size_type pos) const { return entries().at(pos); }
const_reference front() const { return entries().front(); }
const_reference back() const { return entries().back(); }
const_iterator begin() const { return entries().begin(); }
const_iterator end() const { return entries().end(); }
const_reverse_iterator rbegin() const { return entries().rbegin(); }
const_reverse_iterator rend() const { return entries().rend(); }
const_iterator cbegin() const { return entries().cbegin(); }
const_iterator cend() const { return entries().cend(); }
const_reverse_iterator crbegin() const { return entries().crbegin(); }
const_reverse_iterator crend() const { return entries().crend(); }
bool empty() const { return entries().empty(); }
size_type size() const { return entries().size(); }
private:
// Immutable, reference-counted holder of the entry vector.
class Data : public rtc::RefCountedBase {
public:
static rtc::scoped_refptr<Data> Create(const vector_type& entries) {
// Performance optimization for the empty case.
if (entries.empty()) {
return nullptr;
}
return new Data(entries);
}
static rtc::scoped_refptr<Data> Create(vector_type&& entries) {
// Performance optimization for the empty case.
if (entries.empty()) {
return nullptr;
}
return new Data(std::move(entries));
}
const vector_type& entries() const { return entries_; }
private:
explicit Data(const vector_type& entries) : entries_(entries) {}
explicit Data(vector_type&& entries) : entries_(std::move(entries)) {}
~Data() override {}
const vector_type entries_;
};
// Shared empty vector, allocated once and intentionally never deleted so
// that no exit-time destructor runs.
static const vector_type& empty_entries() {
static const vector_type& value = *new vector_type();
return value;
}
// Returns the shared entry vector, or the shared empty vector when no Data
// block is attached (the empty-collection optimization above).
const vector_type& entries() const {
if (data_ != nullptr) {
return data_->entries();
} else {
return empty_entries();
}
}
// Null when the collection is empty.
rtc::scoped_refptr<Data> data_;
};
} // namespace webrtc
#endif // API_RTP_PACKET_INFOS_H_

164
webrtc/api/scoped_refptr.h Normal file
View File

@ -0,0 +1,164 @@
/*
* Copyright 2011 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Originally these classes are from Chromium.
// http://src.chromium.org/viewvc/chrome/trunk/src/base/memory/ref_counted.h?view=markup
//
// A smart pointer class for reference counted objects. Use this class instead
// of calling AddRef and Release manually on a reference counted object to
// avoid common memory leaks caused by forgetting to Release an object
// reference. Sample usage:
//
// class MyFoo : public RefCounted<MyFoo> {
// ...
// };
//
// void some_function() {
// scoped_refptr<MyFoo> foo = new MyFoo();
// foo->Method(param);
// // |foo| is released when this function returns
// }
//
// void some_other_function() {
// scoped_refptr<MyFoo> foo = new MyFoo();
// ...
// foo = nullptr; // explicitly releases |foo|
// ...
// if (foo)
// foo->Method(param);
// }
//
// The above examples show how scoped_refptr<T> acts like a pointer to T.
// Given two scoped_refptr<T> classes, it is also possible to exchange
// references between the two objects, like so:
//
// {
// scoped_refptr<MyFoo> a = new MyFoo();
// scoped_refptr<MyFoo> b;
//
// b.swap(a);
// // now, |b| references the MyFoo object, and |a| references null.
// }
//
// To make both |a| and |b| in the above example reference the same MyFoo
// object, simply use the assignment operator:
//
// {
// scoped_refptr<MyFoo> a = new MyFoo();
// scoped_refptr<MyFoo> b;
//
// b = a;
// // now, |a| and |b| each own a reference to the same MyFoo object.
// }
//
#ifndef API_SCOPED_REFPTR_H_
#define API_SCOPED_REFPTR_H_
#include <memory>
#include <utility>
namespace rtc {
// Smart pointer for objects exposing AddRef()/Release(); see the file comment
// above for usage examples.
template <class T>
class scoped_refptr {
public:
typedef T element_type;
scoped_refptr() : ptr_(nullptr) {}
// Intentionally implicit so raw pointers convert seamlessly; takes a new
// reference on the pointee.
scoped_refptr(T* p) : ptr_(p) { // NOLINT(runtime/explicit)
if (ptr_)
ptr_->AddRef();
}
scoped_refptr(const scoped_refptr<T>& r) : ptr_(r.ptr_) {
if (ptr_)
ptr_->AddRef();
}
// Converting copy constructor (e.g. scoped_refptr<Derived> -> <Base>).
template <typename U>
scoped_refptr(const scoped_refptr<U>& r) : ptr_(r.get()) {
if (ptr_)
ptr_->AddRef();
}
// Move constructors. They steal |r|'s reference without touching the
// reference count; |r| is left null.
scoped_refptr(scoped_refptr<T>&& r) noexcept : ptr_(r.release()) {}
template <typename U>
scoped_refptr(scoped_refptr<U>&& r) noexcept : ptr_(r.release()) {}
~scoped_refptr() {
if (ptr_)
ptr_->Release();
}
T* get() const { return ptr_; }
// Implicit conversion to the raw pointer; beware of accidentally passing a
// scoped_refptr where a plain T* is expected.
operator T*() const { return ptr_; }
T* operator->() const { return ptr_; }
// Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a
// null pointer, all without touching the reference count of the underlying
// pointed-to object. The object is still reference counted, and the caller of
// release() is now the proud owner of one reference, so it is responsible for
// calling Release() once on the object when no longer using it.
T* release() {
T* retVal = ptr_;
ptr_ = nullptr;
return retVal;
}
scoped_refptr<T>& operator=(T* p) {
// AddRef first so that self assignment should work
if (p)
p->AddRef();
if (ptr_)
ptr_->Release();
ptr_ = p;
return *this;
}
scoped_refptr<T>& operator=(const scoped_refptr<T>& r) {
return *this = r.ptr_;
}
template <typename U>
scoped_refptr<T>& operator=(const scoped_refptr<U>& r) {
return *this = r.get();
}
// Move assignment via construct-and-swap: the temporary takes our old
// pointer and releases it when it goes out of scope.
scoped_refptr<T>& operator=(scoped_refptr<T>&& r) noexcept {
scoped_refptr<T>(std::move(r)).swap(*this);
return *this;
}
template <typename U>
scoped_refptr<T>& operator=(scoped_refptr<U>&& r) noexcept {
scoped_refptr<T>(std::move(r)).swap(*this);
return *this;
}
// Swaps the held pointer with |*pp| without adjusting reference counts.
void swap(T** pp) noexcept {
T* p = ptr_;
ptr_ = *pp;
*pp = p;
}
void swap(scoped_refptr<T>& r) noexcept { swap(&r.ptr_); }
protected:
T* ptr_;
};
} // namespace rtc
#endif // API_SCOPED_REFPTR_H_

View File

@ -0,0 +1,32 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TASK_QUEUE_QUEUED_TASK_H_
#define API_TASK_QUEUE_QUEUED_TASK_H_
namespace webrtc {
// Base interface for asynchronously executed tasks.
// The interface basically consists of a single function, Run(), that executes
// on the target queue. For more details see the Run() method and TaskQueue.
class QueuedTask {
 public:
  virtual ~QueuedTask() = default;
  // Main routine that will run when the task is executed on the desired queue.
  // The task should return |true| to indicate that it should be deleted or
  // |false| to indicate that the queue should consider ownership of the task
  // having been transferred. Returning |false| can be useful if a task has
  // re-posted itself to a different queue or is otherwise being re-used.
  // Note: when |false| is returned, the queue will NOT delete the task; some
  // other code must eventually take care of its lifetime.
  virtual bool Run() = 0;
};
} // namespace webrtc
#endif // API_TASK_QUEUE_QUEUED_TASK_H_

View File

@ -0,0 +1,79 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/task_queue/task_queue_base.h"
#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "rtc_base/checks.h"
#if defined(ABSL_HAVE_THREAD_LOCAL)
namespace webrtc {
namespace {
// Per-thread pointer to the TaskQueue currently executing on this thread
// (nullptr when the thread is not driven by a TaskQueue).
ABSL_CONST_INIT thread_local TaskQueueBase* current = nullptr;
}  // namespace
TaskQueueBase* TaskQueueBase::Current() {
  return current;
}
// RAII setter: records the previous value so nested scopes restore correctly.
TaskQueueBase::CurrentTaskQueueSetter::CurrentTaskQueueSetter(
    TaskQueueBase* task_queue)
    : previous_(current) {
  current = task_queue;
}
TaskQueueBase::CurrentTaskQueueSetter::~CurrentTaskQueueSetter() {
  current = previous_;
}
}  // namespace webrtc
#elif defined(WEBRTC_POSIX)
// Fallback for toolchains without C++ thread_local support: emulate it with a
// process-wide pthread TLS key, created lazily exactly once.
#include <pthread.h>
namespace webrtc {
namespace {
ABSL_CONST_INIT pthread_key_t g_queue_ptr_tls = 0;
void InitializeTls() {
  RTC_CHECK(pthread_key_create(&g_queue_ptr_tls, nullptr) == 0);
}
// Returns the TLS key, creating it on first use (thread-safe via pthread_once).
pthread_key_t GetQueuePtrTls() {
  static pthread_once_t init_once = PTHREAD_ONCE_INIT;
  RTC_CHECK(pthread_once(&init_once, &InitializeTls) == 0);
  return g_queue_ptr_tls;
}
}  // namespace
TaskQueueBase* TaskQueueBase::Current() {
  return static_cast<TaskQueueBase*>(pthread_getspecific(GetQueuePtrTls()));
}
TaskQueueBase::CurrentTaskQueueSetter::CurrentTaskQueueSetter(
    TaskQueueBase* task_queue)
    : previous_(TaskQueueBase::Current()) {
  pthread_setspecific(GetQueuePtrTls(), task_queue);
}
TaskQueueBase::CurrentTaskQueueSetter::~CurrentTaskQueueSetter() {
  pthread_setspecific(GetQueuePtrTls(), previous_);
}
}  // namespace webrtc
#else
#error Unsupported platform
#endif

View File

@ -0,0 +1,83 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TASK_QUEUE_TASK_QUEUE_BASE_H_
#define API_TASK_QUEUE_TASK_QUEUE_BASE_H_
#include <memory>
#include "api/task_queue/queued_task.h"
#include "rtc_base/system/rtc_export.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
// Asynchronously executes tasks in a way that guarantees that they're executed
// in FIFO order and that tasks never overlap. Tasks may always execute on the
// same worker thread and they may not. To DCHECK that tasks are executing on a
// known task queue, use IsCurrent().
class RTC_LOCKABLE RTC_EXPORT TaskQueueBase {
 public:
  // Starts destruction of the task queue.
  // On return ensures no tasks are running and no new tasks are able to start
  // on the task queue.
  // Responsible for deallocation. Deallocation may happen synchronously during
  // Delete or asynchronously after Delete returns.
  // Code not running on the TaskQueue should not make any assumption when
  // TaskQueue is deallocated and thus should not call any methods after Delete.
  // Code running on the TaskQueue should not call Delete, but can assume
  // TaskQueue still exists and may call other methods, e.g. PostTask.
  virtual void Delete() = 0;
  // Schedules a task to execute. Tasks are executed in FIFO order.
  // If |task->Run()| returns true, task is deleted on the task queue
  // before next QueuedTask starts executing.
  // When a TaskQueue is deleted, pending tasks will not be executed but they
  // will be deleted. The deletion of tasks may happen synchronously on the
  // TaskQueue or it may happen asynchronously after TaskQueue is deleted.
  // This may vary from one implementation to the next so assumptions about
  // lifetimes of pending tasks should not be made.
  virtual void PostTask(std::unique_ptr<QueuedTask> task) = 0;
  // Schedules a task to execute a specified number of milliseconds from when
  // the call is made. The precision should be considered as "best effort"
  // and in some cases, such as on Windows when all high precision timers have
  // been used up, can be off by as much as 15 milliseconds.
  virtual void PostDelayedTask(std::unique_ptr<QueuedTask> task,
                               uint32_t milliseconds) = 0;
  // Returns the task queue that is running the current thread.
  // Returns nullptr if this thread is not associated with any task queue.
  static TaskQueueBase* Current();
  bool IsCurrent() const { return Current() == this; }
 protected:
  // RAII helper that marks |task_queue| as the current queue for this thread
  // for the lifetime of the setter, restoring the previous value on exit.
  class CurrentTaskQueueSetter {
   public:
    explicit CurrentTaskQueueSetter(TaskQueueBase* task_queue);
    CurrentTaskQueueSetter(const CurrentTaskQueueSetter&) = delete;
    CurrentTaskQueueSetter& operator=(const CurrentTaskQueueSetter&) = delete;
    ~CurrentTaskQueueSetter();
   private:
    TaskQueueBase* const previous_;
  };
  // Users of the TaskQueue should call Delete instead of directly deleting
  // this object.
  virtual ~TaskQueueBase() = default;
};
// Deleter for std::unique_ptr<TaskQueueBase, TaskQueueDeleter>: routes
// destruction through Delete() rather than operator delete.
struct TaskQueueDeleter {
  void operator()(TaskQueueBase* task_queue) const { task_queue->Delete(); }
};
} // namespace webrtc
#endif // API_TASK_QUEUE_TASK_QUEUE_BASE_H_

View File

@ -0,0 +1,34 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/units/data_rate.h"
#include "api/array_view.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
// Renders |value| as a human-readable string: "+inf bps"/"-inf bps" for the
// infinities, whole kilobits ("N kbps") when the rate divides evenly by 1000,
// and raw bits per second ("N bps") otherwise.
std::string ToString(DataRate value) {
  char storage[64];
  rtc::SimpleStringBuilder builder(storage);
  if (value.IsPlusInfinity()) {
    builder << "+inf bps";
    return builder.str();
  }
  if (value.IsMinusInfinity()) {
    builder << "-inf bps";
    return builder.str();
  }
  const int64_t bps = value.bps();
  if (bps == 0 || bps % 1000 != 0) {
    builder << bps << " bps";
  } else {
    builder << value.kbps() << " kbps";
  }
  return builder.str();
}
} // namespace webrtc

View File

@ -0,0 +1,155 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_UNITS_DATA_RATE_H_
#define API_UNITS_DATA_RATE_H_
#ifdef UNIT_TEST
#include <ostream> // no-presubmit-check TODO(webrtc:8982)
#endif // UNIT_TEST
#include <limits>
#include <string>
#include <type_traits>
#include "api/units/data_size.h"
#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "rtc_base/checks.h"
#include "rtc_base/units/unit_base.h"
namespace webrtc {
// DataRate is a class that represents a given data rate. This can be used to
// represent bandwidth, encoding bitrate, etc. The internal storage is bits per
// second (bps).
class DataRate final : public rtc_units_impl::RelativeUnit<DataRate> {
 public:
  // Factory functions; only arithmetic argument types are accepted.
  template <typename T>
  static constexpr DataRate BitsPerSec(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromValue(value);
  }
  template <typename T>
  static constexpr DataRate BytesPerSec(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(8, value);
  }
  template <typename T>
  static constexpr DataRate KilobitsPerSec(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(1000, value);
  }
  static constexpr DataRate Infinity() { return PlusInfinity(); }
  DataRate() = delete;
  // Accessors in various units; conversion semantics (rounding, infinity
  // handling) are inherited from rtc_units_impl::UnitBase.
  template <typename T = int64_t>
  constexpr T bps() const {
    return ToValue<T>();
  }
  template <typename T = int64_t>
  constexpr T bytes_per_sec() const {
    return ToFraction<8, T>();
  }
  template <typename T = int64_t>
  constexpr T kbps() const {
    return ToFraction<1000, T>();
  }
  // Accessors returning |fallback_value| when the rate is infinite.
  constexpr int64_t bps_or(int64_t fallback_value) const {
    return ToValueOr(fallback_value);
  }
  constexpr int64_t kbps_or(int64_t fallback_value) const {
    return ToFractionOr<1000>(fallback_value);
  }
 private:
  // Bits per second used internally to simplify debugging by making the value
  // more recognizable.
  friend class rtc_units_impl::UnitBase<DataRate>;
  using RelativeUnit::RelativeUnit;
  static constexpr bool one_sided = true;
};
namespace data_rate_impl {
inline constexpr int64_t Microbits(const DataSize& size) {
constexpr int64_t kMaxBeforeConversion =
std::numeric_limits<int64_t>::max() / 8000000;
RTC_DCHECK_LE(size.bytes(), kMaxBeforeConversion)
<< "size is too large to be expressed in microbits";
return size.bytes() * 8000000;
}
inline constexpr int64_t MillibytePerSec(const DataRate& size) {
constexpr int64_t kMaxBeforeConversion =
std::numeric_limits<int64_t>::max() / (1000 / 8);
RTC_DCHECK_LE(size.bps(), kMaxBeforeConversion)
<< "rate is too large to be expressed in microbytes per second";
return size.bps() * (1000 / 8);
}
} // namespace data_rate_impl
// Mixed-unit arithmetic between DataSize, TimeDelta, Frequency and DataRate.
// Intermediate values use micro/milli scaling to keep integer precision.
inline constexpr DataRate operator/(const DataSize size,
                                    const TimeDelta duration) {
  return DataRate::BitsPerSec(data_rate_impl::Microbits(size) / duration.us());
}
inline constexpr TimeDelta operator/(const DataSize size, const DataRate rate) {
  return TimeDelta::Micros(data_rate_impl::Microbits(size) / rate.bps());
}
// Result is rounded to the nearest byte (+4'000'000 before dividing by the
// 8'000'000 microbits per byte).
inline constexpr DataSize operator*(const DataRate rate,
                                    const TimeDelta duration) {
  int64_t microbits = rate.bps() * duration.us();
  return DataSize::Bytes((microbits + 4000000) / 8000000);
}
inline constexpr DataSize operator*(const TimeDelta duration,
                                    const DataRate rate) {
  return rate * duration;
}
inline constexpr DataSize operator/(const DataRate rate,
                                    const Frequency frequency) {
  int64_t millihertz = frequency.millihertz<int64_t>();
  // Note that the value is truncated here rather than rounded, potentially
  // introducing an error of .5 bytes if rounding were expected.
  return DataSize::Bytes(data_rate_impl::MillibytePerSec(rate) / millihertz);
}
inline constexpr Frequency operator/(const DataRate rate, const DataSize size) {
  return Frequency::MilliHertz(data_rate_impl::MillibytePerSec(rate) /
                               size.bytes());
}
// Result is rounded to the nearest bit per second (+500 before dividing the
// millibits-per-second intermediate by 1000). Overflow is DCHECKed.
inline constexpr DataRate operator*(const DataSize size,
                                    const Frequency frequency) {
  RTC_DCHECK(frequency.IsZero() ||
             size.bytes() <= std::numeric_limits<int64_t>::max() / 8 /
                                 frequency.millihertz<int64_t>());
  int64_t millibits_per_second =
      size.bytes() * 8 * frequency.millihertz<int64_t>();
  return DataRate::BitsPerSec((millibits_per_second + 500) / 1000);
}
inline constexpr DataRate operator*(const Frequency frequency,
                                    const DataSize size) {
  return size * frequency;
}
// Human-readable representation, e.g. "+inf bps", "200 bps" or "3 kbps".
std::string ToString(DataRate value);
inline std::string ToLogString(DataRate value) {
  return ToString(value);
}
#ifdef UNIT_TEST
inline std::ostream& operator<<(  // no-presubmit-check TODO(webrtc:8982)
    std::ostream& stream,  // no-presubmit-check TODO(webrtc:8982)
    DataRate value) {
  return stream << ToString(value);
}
#endif  // UNIT_TEST
} // namespace webrtc
#endif // API_UNITS_DATA_RATE_H_

View File

@ -0,0 +1,30 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/units/data_size.h"
#include "api/array_view.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
// Renders |value| as a human-readable byte count, or "+inf bytes"/"-inf bytes"
// for the infinities.
std::string ToString(DataSize value) {
  char storage[64];
  rtc::SimpleStringBuilder builder(storage);
  if (value.IsPlusInfinity()) {
    builder << "+inf bytes";
    return builder.str();
  }
  if (value.IsMinusInfinity()) {
    builder << "-inf bytes";
    return builder.str();
  }
  builder << value.bytes() << " bytes";
  return builder.str();
}
} // namespace webrtc

View File

@ -0,0 +1,66 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_UNITS_DATA_SIZE_H_
#define API_UNITS_DATA_SIZE_H_
#ifdef UNIT_TEST
#include <ostream> // no-presubmit-check TODO(webrtc:8982)
#endif // UNIT_TEST
#include <string>
#include <type_traits>
#include "rtc_base/units/unit_base.h"
namespace webrtc {
// DataSize is a class representing a count of bytes.
class DataSize final : public rtc_units_impl::RelativeUnit<DataSize> {
 public:
  // Factory; only arithmetic argument types are accepted.
  template <typename T>
  static constexpr DataSize Bytes(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromValue(value);
  }
  static constexpr DataSize Infinity() { return PlusInfinity(); }
  DataSize() = delete;
  template <typename T = int64_t>
  constexpr T bytes() const {
    return ToValue<T>();
  }
  // Returns |fallback_value| when the size is infinite.
  constexpr int64_t bytes_or(int64_t fallback_value) const {
    return ToValueOr(fallback_value);
  }
 private:
  friend class rtc_units_impl::UnitBase<DataSize>;
  using RelativeUnit::RelativeUnit;
  static constexpr bool one_sided = true;
};
// Human-readable representation, e.g. "42 bytes" or "+inf bytes".
std::string ToString(DataSize value);
inline std::string ToLogString(DataSize value) {
  return ToString(value);
}
#ifdef UNIT_TEST
inline std::ostream& operator<<(  // no-presubmit-check TODO(webrtc:8982)
    std::ostream& stream,  // no-presubmit-check TODO(webrtc:8982)
    DataSize value) {
  return stream << ToString(value);
}
#endif  // UNIT_TEST
} // namespace webrtc
#endif // API_UNITS_DATA_SIZE_H_

View File

@ -0,0 +1,29 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/units/frequency.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
// Renders |value| as hertz: "+inf Hz"/"-inf Hz" for the infinities, a
// three-decimal float when there is a fractional-hertz component, and an
// integer otherwise.
std::string ToString(Frequency value) {
  char storage[64];
  rtc::SimpleStringBuilder builder(storage);
  if (value.IsPlusInfinity()) {
    builder << "+inf Hz";
    return builder.str();
  }
  if (value.IsMinusInfinity()) {
    builder << "-inf Hz";
    return builder.str();
  }
  if (value.millihertz<int64_t>() % 1000 != 0) {
    builder.AppendFormat("%.3f Hz", value.hertz<double>());
  } else {
    builder << value.hertz<int64_t>() << " Hz";
  }
  return builder.str();
}
} // namespace webrtc

View File

@ -0,0 +1,101 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_UNITS_FREQUENCY_H_
#define API_UNITS_FREQUENCY_H_
#ifdef UNIT_TEST
#include <ostream> // no-presubmit-check TODO(webrtc:8982)
#endif // UNIT_TEST
#include <cstdlib>
#include <limits>
#include <string>
#include <type_traits>
#include "api/units/time_delta.h"
#include "rtc_base/units/unit_base.h"
namespace webrtc {
// Frequency is a rate unit stored internally in millihertz; see the factory
// and accessor fractions below.
class Frequency final : public rtc_units_impl::RelativeUnit<Frequency> {
 public:
  // Factories; only arithmetic argument types are accepted.
  template <typename T>
  static constexpr Frequency MilliHertz(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromValue(value);
  }
  template <typename T>
  static constexpr Frequency Hertz(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(1'000, value);
  }
  template <typename T>
  static constexpr Frequency KiloHertz(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(1'000'000, value);
  }
  Frequency() = delete;
  template <typename T = int64_t>
  constexpr T hertz() const {
    return ToFraction<1000, T>();
  }
  template <typename T = int64_t>
  constexpr T millihertz() const {
    return ToValue<T>();
  }
 private:
  friend class rtc_units_impl::UnitBase<Frequency>;
  using RelativeUnit::RelativeUnit;
  static constexpr bool one_sided = true;
};
// Frequency of |nominator| events over |interval|. The interval must be finite
// and non-zero; |nominator| is DCHECKed against int64_t overflow.
inline constexpr Frequency operator/(int64_t nominator,
                                     const TimeDelta& interval) {
  constexpr int64_t kKiloPerMicro = 1000 * 1000000;
  RTC_DCHECK_LE(nominator, std::numeric_limits<int64_t>::max() / kKiloPerMicro);
  RTC_CHECK(interval.IsFinite());
  RTC_CHECK(!interval.IsZero());
  return Frequency::MilliHertz(nominator * kKiloPerMicro / interval.us());
}
// Period for |nominator| events at |frequency|. The frequency must be finite
// and non-zero; |nominator| is DCHECKed against int64_t overflow.
inline constexpr TimeDelta operator/(int64_t nominator,
                                     const Frequency& frequency) {
  constexpr int64_t kMegaPerMilli = 1000000 * 1000;
  RTC_DCHECK_LE(nominator, std::numeric_limits<int64_t>::max() / kMegaPerMilli);
  RTC_CHECK(frequency.IsFinite());
  RTC_CHECK(!frequency.IsZero());
  return TimeDelta::Micros(nominator * kMegaPerMilli / frequency.millihertz());
}
// Number of (fractional) cycles elapsed over |time_delta| at |frequency|.
inline constexpr double operator*(Frequency frequency, TimeDelta time_delta) {
  return frequency.hertz<double>() * time_delta.seconds<double>();
}
inline constexpr double operator*(TimeDelta time_delta, Frequency frequency) {
  return frequency * time_delta;
}
// Human-readable representation, e.g. "50 Hz" or "0.500 Hz".
std::string ToString(Frequency value);
inline std::string ToLogString(Frequency value) {
  return ToString(value);
}
#ifdef UNIT_TEST
inline std::ostream& operator<<(  // no-presubmit-check TODO(webrtc:8982)
    std::ostream& stream,  // no-presubmit-check TODO(webrtc:8982)
    Frequency value) {
  return stream << ToString(value);
}
#endif  // UNIT_TEST
} // namespace webrtc
#endif // API_UNITS_FREQUENCY_H_

View File

@ -0,0 +1,36 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/units/time_delta.h"
#include "api/array_view.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
// Renders |value| in the coarsest unit that represents it exactly:
// microseconds, milliseconds or seconds. The infinities print as "+inf ms" /
// "-inf ms".
std::string ToString(TimeDelta value) {
  char storage[64];
  rtc::SimpleStringBuilder builder(storage);
  if (value.IsPlusInfinity()) {
    builder << "+inf ms";
    return builder.str();
  }
  if (value.IsMinusInfinity()) {
    builder << "-inf ms";
    return builder.str();
  }
  const int64_t us = value.us();
  if (us == 0 || us % 1000 != 0) {
    builder << us << " us";
  } else if (value.ms() % 1000 != 0) {
    builder << value.ms() << " ms";
  } else {
    builder << value.seconds() << " s";
  }
  return builder.str();
}
} // namespace webrtc

View File

@ -0,0 +1,105 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_UNITS_TIME_DELTA_H_
#define API_UNITS_TIME_DELTA_H_
#ifdef UNIT_TEST
#include <ostream> // no-presubmit-check TODO(webrtc:8982)
#endif // UNIT_TEST
#include <cstdlib>
#include <string>
#include <type_traits>
#include "rtc_base/units/unit_base.h"
namespace webrtc {
// TimeDelta represents the difference between two timestamps. Commonly this can
// be a duration. However since two Timestamps are not guaranteed to have the
// same epoch (they might come from different computers, making exact
// synchronisation infeasible), the duration covered by a TimeDelta can be
// undefined. To simplify usage, it can be constructed and converted to
// different units, specifically seconds (s), milliseconds (ms) and
// microseconds (us).
class TimeDelta final : public rtc_units_impl::RelativeUnit<TimeDelta> {
 public:
  // Factories; the value is stored internally in microseconds.
  template <typename T>
  static constexpr TimeDelta Seconds(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(1'000'000, value);
  }
  template <typename T>
  static constexpr TimeDelta Millis(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(1'000, value);
  }
  template <typename T>
  static constexpr TimeDelta Micros(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromValue(value);
  }
  TimeDelta() = delete;
  // Accessors in various units.
  template <typename T = int64_t>
  constexpr T seconds() const {
    return ToFraction<1000000, T>();
  }
  template <typename T = int64_t>
  constexpr T ms() const {
    return ToFraction<1000, T>();
  }
  template <typename T = int64_t>
  constexpr T us() const {
    return ToValue<T>();
  }
  template <typename T = int64_t>
  constexpr T ns() const {
    return ToMultiple<1000, T>();
  }
  // Accessors returning |fallback_value| when the delta is infinite.
  constexpr int64_t seconds_or(int64_t fallback_value) const {
    return ToFractionOr<1000000>(fallback_value);
  }
  constexpr int64_t ms_or(int64_t fallback_value) const {
    return ToFractionOr<1000>(fallback_value);
  }
  constexpr int64_t us_or(int64_t fallback_value) const {
    return ToValueOr(fallback_value);
  }
  // Absolute value; negates only when the delta is negative.
  constexpr TimeDelta Abs() const {
    return us() < 0 ? TimeDelta::Micros(-us()) : *this;
  }
 private:
  friend class rtc_units_impl::UnitBase<TimeDelta>;
  using RelativeUnit::RelativeUnit;
  static constexpr bool one_sided = false;
};
// Human-readable representation in the coarsest exact unit, e.g. "250 us",
// "20 ms" or "3 s".
std::string ToString(TimeDelta value);
inline std::string ToLogString(TimeDelta value) {
  return ToString(value);
}
#ifdef UNIT_TEST
inline std::ostream& operator<<(  // no-presubmit-check TODO(webrtc:8982)
    std::ostream& stream,  // no-presubmit-check TODO(webrtc:8982)
    TimeDelta value) {
  return stream << ToString(value);
}
#endif  // UNIT_TEST
} // namespace webrtc
#endif // API_UNITS_TIME_DELTA_H_

View File

@ -0,0 +1,34 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/units/timestamp.h"
#include "api/array_view.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
// Renders |value| in the coarsest unit that represents it exactly
// (microseconds, milliseconds or seconds); infinities print as "+inf ms" /
// "-inf ms".
std::string ToString(Timestamp value) {
  char storage[64];
  rtc::SimpleStringBuilder builder(storage);
  if (value.IsPlusInfinity()) {
    builder << "+inf ms";
    return builder.str();
  }
  if (value.IsMinusInfinity()) {
    builder << "-inf ms";
    return builder.str();
  }
  const int64_t us = value.us();
  if (us == 0 || us % 1000 != 0) {
    builder << us << " us";
  } else if (value.ms() % 1000 != 0) {
    builder << value.ms() << " ms";
  } else {
    builder << value.seconds() << " s";
  }
  return builder.str();
}
} // namespace webrtc

View File

@ -0,0 +1,138 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_UNITS_TIMESTAMP_H_
#define API_UNITS_TIMESTAMP_H_
#ifdef UNIT_TEST
#include <ostream> // no-presubmit-check TODO(webrtc:8982)
#endif // UNIT_TEST
#include <string>
#include <type_traits>
#include "api/units/time_delta.h"
#include "rtc_base/checks.h"
namespace webrtc {
// Timestamp represents the time that has passed since some unspecified epoch.
// The epoch is assumed to be before any represented timestamps, this means that
// negative values are not valid. The most notable feature is that the
// difference of two Timestamps results in a TimeDelta.
class Timestamp final : public rtc_units_impl::UnitBase<Timestamp> {
 public:
  // Factories; the value is stored internally in microseconds.
  template <typename T>
  static constexpr Timestamp Seconds(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(1'000'000, value);
  }
  template <typename T>
  static constexpr Timestamp Millis(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromFraction(1'000, value);
  }
  template <typename T>
  static constexpr Timestamp Micros(T value) {
    static_assert(std::is_arithmetic<T>::value, "");
    return FromValue(value);
  }
  Timestamp() = delete;
  // Accessors in various units.
  template <typename T = int64_t>
  constexpr T seconds() const {
    return ToFraction<1000000, T>();
  }
  template <typename T = int64_t>
  constexpr T ms() const {
    return ToFraction<1000, T>();
  }
  template <typename T = int64_t>
  constexpr T us() const {
    return ToValue<T>();
  }
  // Accessors returning |fallback_value| when the timestamp is infinite.
  constexpr int64_t seconds_or(int64_t fallback_value) const {
    return ToFractionOr<1000000>(fallback_value);
  }
  constexpr int64_t ms_or(int64_t fallback_value) const {
    return ToFractionOr<1000>(fallback_value);
  }
  constexpr int64_t us_or(int64_t fallback_value) const {
    return ToValueOr(fallback_value);
  }
  // Arithmetic with TimeDelta saturates at the infinities; combining opposite
  // infinities is a programming error and is DCHECKed.
  constexpr Timestamp operator+(const TimeDelta delta) const {
    if (IsPlusInfinity() || delta.IsPlusInfinity()) {
      RTC_DCHECK(!IsMinusInfinity());
      RTC_DCHECK(!delta.IsMinusInfinity());
      return PlusInfinity();
    } else if (IsMinusInfinity() || delta.IsMinusInfinity()) {
      RTC_DCHECK(!IsPlusInfinity());
      RTC_DCHECK(!delta.IsPlusInfinity());
      return MinusInfinity();
    }
    return Timestamp::Micros(us() + delta.us());
  }
  constexpr Timestamp operator-(const TimeDelta delta) const {
    if (IsPlusInfinity() || delta.IsMinusInfinity()) {
      RTC_DCHECK(!IsMinusInfinity());
      RTC_DCHECK(!delta.IsPlusInfinity());
      return PlusInfinity();
    } else if (IsMinusInfinity() || delta.IsPlusInfinity()) {
      RTC_DCHECK(!IsPlusInfinity());
      RTC_DCHECK(!delta.IsMinusInfinity());
      return MinusInfinity();
    }
    return Timestamp::Micros(us() - delta.us());
  }
  // The difference of two Timestamps is a TimeDelta, with the same
  // infinity-saturation rules as above.
  constexpr TimeDelta operator-(const Timestamp other) const {
    if (IsPlusInfinity() || other.IsMinusInfinity()) {
      RTC_DCHECK(!IsMinusInfinity());
      RTC_DCHECK(!other.IsPlusInfinity());
      return TimeDelta::PlusInfinity();
    } else if (IsMinusInfinity() || other.IsPlusInfinity()) {
      RTC_DCHECK(!IsPlusInfinity());
      RTC_DCHECK(!other.IsMinusInfinity());
      return TimeDelta::MinusInfinity();
    }
    return TimeDelta::Micros(us() - other.us());
  }
  constexpr Timestamp& operator-=(const TimeDelta delta) {
    *this = *this - delta;
    return *this;
  }
  constexpr Timestamp& operator+=(const TimeDelta delta) {
    *this = *this + delta;
    return *this;
  }
 private:
  friend class rtc_units_impl::UnitBase<Timestamp>;
  using UnitBase::UnitBase;
  static constexpr bool one_sided = true;
};
// Human-readable representation in the coarsest exact unit, e.g. "250 us",
// "20 ms" or "3 s".
std::string ToString(Timestamp value);
inline std::string ToLogString(Timestamp value) {
  return ToString(value);
}
#ifdef UNIT_TEST
inline std::ostream& operator<<(  // no-presubmit-check TODO(webrtc:8982)
    std::ostream& stream,  // no-presubmit-check TODO(webrtc:8982)
    Timestamp value) {
  return stream << ToString(value);
}
#endif  // UNIT_TEST
} // namespace webrtc
#endif // API_UNITS_TIMESTAMP_H_

View File

@ -0,0 +1,187 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/color_space.h"
namespace webrtc {
namespace {
// Tries to convert |enum_value| into the enum class T. |enum_bitmask| (built
// by CreateEnumBitmask below) has one bit set per valid enumerator. Writes the
// converted value to |*out| and returns true on success; returns false and
// leaves |*out| untouched otherwise.
template <typename T>
bool SetFromUint8(uint8_t enum_value, uint64_t enum_bitmask, T* out) {
  if (enum_value >= 64)
    return false;
  if (((enum_bitmask >> enum_value) & 1) == 0)
    return false;
  *out = static_cast<T>(enum_value);
  return true;
}
// This function serves as an assert for the constexpr function below. It's on
// purpose not declared as constexpr so that it causes a build problem if enum
// values of 64 or above are used. The bitmask and the code generating it would
// have to be extended if the standard is updated to include enum values >= 64.
int EnumMustBeLessThan64() {
  return -1;
}
// Recursively ORs one bit per enum value in values[index..index+length) into a
// 64-bit mask. The return type must be uint64_t (not int): enum values in
// [32, 63] set bits that do not fit in an int, and the design explicitly
// supports values up to 63.
template <typename T, size_t N>
constexpr uint64_t MakeMask(const int index, const int length, T (&values)[N]) {
  return length > 1
             ? (MakeMask(index, 1, values) |
                MakeMask(index + 1, length - 1, values))
             : (static_cast<uint8_t>(values[index]) < 64
                    ? (uint64_t{1} << static_cast<uint8_t>(values[index]))
                    : EnumMustBeLessThan64());
}
// Create a bitmask where each bit corresponds to one potential enum value.
// |values| should be an array listing all possible enum values. The bit is set
// to one if the corresponding enum exists. Only works for enums with values
// less than 64 (larger values fail to compile via EnumMustBeLessThan64).
template <typename T, size_t N>
constexpr uint64_t CreateEnumBitmask(T (&values)[N]) {
  return MakeMask(0, N, values);
}
// Sets |*chroma_siting| iff |enum_value| matches one of the ChromaSiting
// enumerators listed below; returns false (leaving the output untouched)
// otherwise.
bool SetChromaSitingFromUint8(uint8_t enum_value,
                              ColorSpace::ChromaSiting* chroma_siting) {
  constexpr ColorSpace::ChromaSiting kChromaSitings[] = {
      ColorSpace::ChromaSiting::kUnspecified,
      ColorSpace::ChromaSiting::kCollocated, ColorSpace::ChromaSiting::kHalf};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kChromaSitings);
  return SetFromUint8(enum_value, enum_bitmask, chroma_siting);
}
} // namespace
// Defaulted special members: ColorSpace is copyable and movable.
ColorSpace::ColorSpace() = default;
ColorSpace::ColorSpace(const ColorSpace& other) = default;
ColorSpace::ColorSpace(ColorSpace&& other) = default;
ColorSpace& ColorSpace::operator=(const ColorSpace& other) = default;
// Convenience constructor: delegates to the full constructor with unspecified
// chroma siting and no HDR metadata.
ColorSpace::ColorSpace(PrimaryID primaries,
                       TransferID transfer,
                       MatrixID matrix,
                       RangeID range)
    : ColorSpace(primaries,
                 transfer,
                 matrix,
                 range,
                 ChromaSiting::kUnspecified,
                 ChromaSiting::kUnspecified,
                 nullptr) {}
// Full constructor. |hdr_metadata| is copied when non-null, so the caller
// retains ownership of the pointed-to struct.
ColorSpace::ColorSpace(PrimaryID primaries,
                       TransferID transfer,
                       MatrixID matrix,
                       RangeID range,
                       ChromaSiting chroma_siting_horz,
                       ChromaSiting chroma_siting_vert,
                       const HdrMetadata* hdr_metadata)
    : primaries_(primaries),
      transfer_(transfer),
      matrix_(matrix),
      range_(range),
      chroma_siting_horizontal_(chroma_siting_horz),
      chroma_siting_vertical_(chroma_siting_vert),
      hdr_metadata_(hdr_metadata ? absl::make_optional(*hdr_metadata)
                                 : absl::nullopt) {}
// Trivial accessors mirroring the constructor parameters.
ColorSpace::PrimaryID ColorSpace::primaries() const {
  return primaries_;
}
ColorSpace::TransferID ColorSpace::transfer() const {
  return transfer_;
}
ColorSpace::MatrixID ColorSpace::matrix() const {
  return matrix_;
}
ColorSpace::RangeID ColorSpace::range() const {
  return range_;
}
ColorSpace::ChromaSiting ColorSpace::chroma_siting_horizontal() const {
  return chroma_siting_horizontal_;
}
ColorSpace::ChromaSiting ColorSpace::chroma_siting_vertical() const {
  return chroma_siting_vertical_;
}
// Returns a pointer into the internal optional, or nullptr when unset; the
// pointer is invalidated by set_hdr_metadata() or assignment.
const HdrMetadata* ColorSpace::hdr_metadata() const {
  return hdr_metadata_ ? &*hdr_metadata_ : nullptr;
}
// The set_*_from_uint8 functions validate |enum_value| against the listed
// enumerators via a compile-time bitmask; on a match the field is set and true
// is returned, otherwise the field is left unchanged and false is returned.
bool ColorSpace::set_primaries_from_uint8(uint8_t enum_value) {
  constexpr PrimaryID kPrimaryIds[] = {
      PrimaryID::kBT709, PrimaryID::kUnspecified, PrimaryID::kBT470M,
      PrimaryID::kBT470BG, PrimaryID::kSMPTE170M, PrimaryID::kSMPTE240M,
      PrimaryID::kFILM, PrimaryID::kBT2020, PrimaryID::kSMPTEST428,
      PrimaryID::kSMPTEST431, PrimaryID::kSMPTEST432, PrimaryID::kJEDECP22};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kPrimaryIds);
  return SetFromUint8(enum_value, enum_bitmask, &primaries_);
}
bool ColorSpace::set_transfer_from_uint8(uint8_t enum_value) {
  constexpr TransferID kTransferIds[] = {
      TransferID::kBT709, TransferID::kUnspecified,
      TransferID::kGAMMA22, TransferID::kGAMMA28,
      TransferID::kSMPTE170M, TransferID::kSMPTE240M,
      TransferID::kLINEAR, TransferID::kLOG,
      TransferID::kLOG_SQRT, TransferID::kIEC61966_2_4,
      TransferID::kBT1361_ECG, TransferID::kIEC61966_2_1,
      TransferID::kBT2020_10, TransferID::kBT2020_12,
      TransferID::kSMPTEST2084, TransferID::kSMPTEST428,
      TransferID::kARIB_STD_B67};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kTransferIds);
  return SetFromUint8(enum_value, enum_bitmask, &transfer_);
}
bool ColorSpace::set_matrix_from_uint8(uint8_t enum_value) {
  constexpr MatrixID kMatrixIds[] = {
      MatrixID::kRGB, MatrixID::kBT709, MatrixID::kUnspecified,
      MatrixID::kFCC, MatrixID::kBT470BG, MatrixID::kSMPTE170M,
      MatrixID::kSMPTE240M, MatrixID::kYCOCG, MatrixID::kBT2020_NCL,
      MatrixID::kBT2020_CL, MatrixID::kSMPTE2085, MatrixID::kCDNCLS,
      MatrixID::kCDCLS, MatrixID::kBT2100_ICTCP};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kMatrixIds);
  return SetFromUint8(enum_value, enum_bitmask, &matrix_);
}
bool ColorSpace::set_range_from_uint8(uint8_t enum_value) {
  constexpr RangeID kRangeIds[] = {RangeID::kInvalid, RangeID::kLimited,
                                   RangeID::kFull, RangeID::kDerived};
  constexpr uint64_t enum_bitmask = CreateEnumBitmask(kRangeIds);
  return SetFromUint8(enum_value, enum_bitmask, &range_);
}
bool ColorSpace::set_chroma_siting_horizontal_from_uint8(uint8_t enum_value) {
  return SetChromaSitingFromUint8(enum_value, &chroma_siting_horizontal_);
}
bool ColorSpace::set_chroma_siting_vertical_from_uint8(uint8_t enum_value) {
  return SetChromaSitingFromUint8(enum_value, &chroma_siting_vertical_);
}
// Stores a copy of |hdr_metadata|, or clears the stored metadata when the
// pointer is null. The caller keeps ownership of the pointee.
void ColorSpace::set_hdr_metadata(const HdrMetadata* hdr_metadata) {
  if (hdr_metadata) {
    hdr_metadata_ = *hdr_metadata;
  } else {
    hdr_metadata_ = absl::nullopt;
  }
}
} // namespace webrtc

View File

@ -0,0 +1,178 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_VIDEO_COLOR_SPACE_H_
#define API_VIDEO_COLOR_SPACE_H_
#include <stdint.h>
#include "absl/types/optional.h"
#include "api/video/hdr_metadata.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
// This class represents color information as specified in T-REC H.273,
// available from https://www.itu.int/rec/T-REC-H.273.
//
// WebRTC's supported codecs:
// - VP9 supports color profiles, see VP9 Bitstream & Decoding Process
// Specification Version 0.6 Section 7.2.2 "Color config semantics" available
// from https://www.webmproject.org.
// - VP8 only supports BT.601, see
// https://tools.ietf.org/html/rfc6386#section-9.2
// - H264 uses the exact same representation as T-REC H.273. See T-REC-H.264
// E.2.1, "VUI parameters semantics", available from
// https://www.itu.int/rec/T-REC-H.264.
class RTC_EXPORT ColorSpace {
 public:
  enum class PrimaryID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 2.
    kBT709 = 1,
    kUnspecified = 2,
    kBT470M = 4,
    kBT470BG = 5,
    kSMPTE170M = 6,  // Identical to BT601
    kSMPTE240M = 7,
    kFILM = 8,
    kBT2020 = 9,
    kSMPTEST428 = 10,
    kSMPTEST431 = 11,
    kSMPTEST432 = 12,
    kJEDECP22 = 22,  // Identical to EBU3213-E
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kPrimaryIds.
  };
  enum class TransferID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 3.
    kBT709 = 1,
    kUnspecified = 2,
    kGAMMA22 = 4,
    kGAMMA28 = 5,
    kSMPTE170M = 6,
    kSMPTE240M = 7,
    kLINEAR = 8,
    kLOG = 9,
    kLOG_SQRT = 10,
    kIEC61966_2_4 = 11,
    kBT1361_ECG = 12,
    kIEC61966_2_1 = 13,
    kBT2020_10 = 14,
    kBT2020_12 = 15,
    kSMPTEST2084 = 16,
    kSMPTEST428 = 17,
    kARIB_STD_B67 = 18,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kTransferIds.
  };
  enum class MatrixID : uint8_t {
    // The indices are equal to the values specified in T-REC H.273 Table 4.
    kRGB = 0,
    kBT709 = 1,
    kUnspecified = 2,
    kFCC = 4,
    kBT470BG = 5,
    kSMPTE170M = 6,
    kSMPTE240M = 7,
    kYCOCG = 8,
    kBT2020_NCL = 9,
    kBT2020_CL = 10,
    kSMPTE2085 = 11,
    kCDNCLS = 12,
    kCDCLS = 13,
    kBT2100_ICTCP = 14,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kMatrixIds.
  };
  enum class RangeID {
    // The indices are equal to the values specified at
    // https://www.webmproject.org/docs/container/#colour for the element Range.
    kInvalid = 0,
    // Limited Rec. 709 color range with RGB values ranging from 16 to 235.
    kLimited = 1,
    // Full RGB color range with RGB values from 0 to 255.
    kFull = 2,
    // Range is defined by MatrixCoefficients/TransferCharacteristics.
    kDerived = 3,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kRangeIds.
  };
  enum class ChromaSiting {
    // Chroma siting specifies how chroma is subsampled relative to the luma
    // samples in a YUV video frame.
    // The indices are equal to the values specified at
    // https://www.webmproject.org/docs/container/#colour for the element
    // ChromaSitingVert and ChromaSitingHorz.
    kUnspecified = 0,
    kCollocated = 1,
    kHalf = 2,
    // When adding/removing entries here, please make sure to do the
    // corresponding change to kChromaSitings.
  };
  ColorSpace();
  ColorSpace(const ColorSpace& other);
  ColorSpace(ColorSpace&& other);
  ColorSpace& operator=(const ColorSpace& other);
  ColorSpace(PrimaryID primaries,
             TransferID transfer,
             MatrixID matrix,
             RangeID range);
  ColorSpace(PrimaryID primaries,
             TransferID transfer,
             MatrixID matrix,
             RangeID range,
             ChromaSiting chroma_siting_horizontal,
             ChromaSiting chroma_siting_vertical,
             const HdrMetadata* hdr_metadata);
  // Two color spaces compare equal only if every field, including the
  // optional HDR metadata, matches.
  friend bool operator==(const ColorSpace& lhs, const ColorSpace& rhs) {
    return lhs.primaries_ == rhs.primaries_ && lhs.transfer_ == rhs.transfer_ &&
           lhs.matrix_ == rhs.matrix_ && lhs.range_ == rhs.range_ &&
           lhs.chroma_siting_horizontal_ == rhs.chroma_siting_horizontal_ &&
           lhs.chroma_siting_vertical_ == rhs.chroma_siting_vertical_ &&
           lhs.hdr_metadata_ == rhs.hdr_metadata_;
  }
  friend bool operator!=(const ColorSpace& lhs, const ColorSpace& rhs) {
    return !(lhs == rhs);
  }
  PrimaryID primaries() const;
  TransferID transfer() const;
  MatrixID matrix() const;
  RangeID range() const;
  ChromaSiting chroma_siting_horizontal() const;
  ChromaSiting chroma_siting_vertical() const;
  // Returns nullptr when no HDR metadata is attached.
  const HdrMetadata* hdr_metadata() const;
  // The set_*_from_uint8 methods parse raw H.273/WebM code points and return
  // false when the value is not a recognized enumerator.
  bool set_primaries_from_uint8(uint8_t enum_value);
  bool set_transfer_from_uint8(uint8_t enum_value);
  bool set_matrix_from_uint8(uint8_t enum_value);
  bool set_range_from_uint8(uint8_t enum_value);
  bool set_chroma_siting_horizontal_from_uint8(uint8_t enum_value);
  bool set_chroma_siting_vertical_from_uint8(uint8_t enum_value);
  void set_hdr_metadata(const HdrMetadata* hdr_metadata);
 private:
  PrimaryID primaries_ = PrimaryID::kUnspecified;
  TransferID transfer_ = TransferID::kUnspecified;
  MatrixID matrix_ = MatrixID::kUnspecified;
  RangeID range_ = RangeID::kInvalid;
  ChromaSiting chroma_siting_horizontal_ = ChromaSiting::kUnspecified;
  ChromaSiting chroma_siting_vertical_ = ChromaSiting::kUnspecified;
  // Copied from the pointer passed to the constructor/setter; absent when no
  // HDR metadata was supplied.
  absl::optional<HdrMetadata> hdr_metadata_;
};
} // namespace webrtc
#endif // API_VIDEO_COLOR_SPACE_H_

View File

@ -0,0 +1,21 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/hdr_metadata.h"
namespace webrtc {
// Out-of-line defaulted constructors; all fields have in-class initializers
// (see hdr_metadata.h).
HdrMasteringMetadata::Chromaticity::Chromaticity() = default;
HdrMasteringMetadata::HdrMasteringMetadata() = default;
HdrMetadata::HdrMetadata() = default;
} // namespace webrtc

View File

@ -0,0 +1,105 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_VIDEO_HDR_METADATA_H_
#define API_VIDEO_HDR_METADATA_H_
namespace webrtc {
// SMPTE ST 2086 mastering metadata,
// see https://ieeexplore.ieee.org/document/8353899.
struct HdrMasteringMetadata {
  // One CIE xy chromaticity coordinate pair.
  struct Chromaticity {
    Chromaticity();
    bool operator==(const Chromaticity& rhs) const {
      return x == rhs.x && y == rhs.y;
    }
    // True iff both coordinates are within the representable [0.0, 1.0] range.
    bool Validate() const {
      return x >= 0.0 && x <= 1.0 && y >= 0.0 && y <= 1.0;
    }
    // xy chromaticity coordinates must be calculated as specified in ISO
    // 11664-3:2012 Section 7, and must be specified with four decimal places.
    // The x coordinate should be in the range [0.0001, 0.7400] and the y
    // coordinate should be in the range [0.0001, 0.8400]. Valid range [0.0000,
    // 1.0000].
    float x = 0.0f;
    float y = 0.0f;
  };
  HdrMasteringMetadata();
  bool operator==(const HdrMasteringMetadata& rhs) const {
    return ((primary_r == rhs.primary_r) && (primary_g == rhs.primary_g) &&
            (primary_b == rhs.primary_b) && (white_point == rhs.white_point) &&
            (luminance_max == rhs.luminance_max) &&
            (luminance_min == rhs.luminance_min));
  }
  // True iff the luminance bounds and all four chromaticities are within
  // their valid ranges.
  bool Validate() const {
    return luminance_max >= 0.0 && luminance_max <= 20000.0 &&
           luminance_min >= 0.0 && luminance_min <= 5.0 &&
           primary_r.Validate() && primary_g.Validate() &&
           primary_b.Validate() && white_point.Validate();
  }
  // The nominal primaries of the mastering display.
  Chromaticity primary_r;
  Chromaticity primary_g;
  Chromaticity primary_b;
  // The nominal chromaticity of the white point of the mastering display.
  Chromaticity white_point;
  // The nominal maximum display luminance of the mastering display. Specified
  // in the unit candela/m2. The value should be in the range [5, 10000] with
  // zero decimal places. Valid range [0, 20000].
  float luminance_max = 0.0f;
  // The nominal minimum display luminance of the mastering display. Specified
  // in the unit candela/m2. The value should be in the range [0.0001, 5.0000]
  // with four decimal places. Valid range [0.0000, 5.0000].
  float luminance_min = 0.0f;
};
// High dynamic range (HDR) metadata common for HDR10 and WebM/VP9-based HDR
// formats. This struct replicates the HDRMetadata struct defined in
// https://cs.chromium.org/chromium/src/media/base/hdr_metadata.h
struct HdrMetadata {
  HdrMetadata();
  bool operator==(const HdrMetadata& rhs) const {
    return (
        (max_content_light_level == rhs.max_content_light_level) &&
        (max_frame_average_light_level == rhs.max_frame_average_light_level) &&
        (mastering_metadata == rhs.mastering_metadata));
  }
  // True iff both light levels are within [0, 20000] and the mastering
  // metadata itself validates.
  bool Validate() const {
    return max_content_light_level >= 0 && max_content_light_level <= 20000 &&
           max_frame_average_light_level >= 0 &&
           max_frame_average_light_level <= 20000 &&
           mastering_metadata.Validate();
  }
  HdrMasteringMetadata mastering_metadata;
  // Max content light level (CLL), i.e. maximum brightness level present in the
  // stream, in nits. 1 nit = 1 candela/m2. Valid range [0, 20000].
  int max_content_light_level = 0;
  // Max frame-average light level (FALL), i.e. maximum average brightness of
  // the brightest frame in the stream, in nits. Valid range [0, 20000].
  int max_frame_average_light_level = 0;
};
} // namespace webrtc
#endif // API_VIDEO_HDR_METADATA_H_

View File

@ -0,0 +1,93 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/video_content_type.h"
// VideoContentType stored as a single byte, which is sent over the network.
// Structure:
//
// 0 1 2 3 4 5 6 7
// +---------------+
// |r r e e e s s c|
//
// where:
// r - reserved bits.
// e - 3-bit number of an experiment group counted from 1. 0 means there's no
// experiment ongoing.
// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that
// no simulcast information is set.
// c - content type. 0 means real-time video, 1 means screenshare.
//
namespace webrtc {
namespace videocontenttypehelpers {
namespace {
// Bit layout constants for the single-byte VideoContentType wire format
// described at the top of this file: |r r e e e s s c| (MSB first).
static constexpr uint8_t kScreenshareBitsSize = 1;
static constexpr uint8_t kScreenshareBitsMask =
    (1u << kScreenshareBitsSize) - 1;
static constexpr uint8_t kSimulcastShift = 1;
static constexpr uint8_t kSimulcastBitsSize = 2;
static constexpr uint8_t kSimulcastBitsMask = ((1u << kSimulcastBitsSize) - 1)
                                              << kSimulcastShift;  // 0b00000110
static constexpr uint8_t kExperimentShift = 3;
static constexpr uint8_t kExperimentBitsSize = 3;
static constexpr uint8_t kExperimentBitsMask =
    ((1u << kExperimentBitsSize) - 1) << kExperimentShift;  // 0b00111000
// 1 + 2 + 3 = 6 meaningful (low) bits; the top two bits are reserved.
static constexpr uint8_t kTotalBitsSize =
    kScreenshareBitsSize + kSimulcastBitsSize + kExperimentBitsSize;
}  // namespace
// Writes |experiment_id| into the experiment bit field of |content_type|,
// replacing any previous value. Returns false (leaving the byte untouched)
// if the id does not fit into kExperimentBitsSize bits.
bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id) {
  // Store in bits 2-4.
  if (experiment_id >= (1 << kExperimentBitsSize))
    return false;
  *content_type = static_cast<VideoContentType>(
      (static_cast<uint8_t>(*content_type) & ~kExperimentBitsMask) |
      ((experiment_id << kExperimentShift) & kExperimentBitsMask));
  return true;
}
// Writes |simulcast_id| into the simulcast bit field of |content_type|,
// replacing any previous value. Returns false (leaving the byte untouched)
// if the id does not fit into kSimulcastBitsSize bits.
bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) {
  // Store in bits 5-6.
  if (simulcast_id >= (1 << kSimulcastBitsSize))
    return false;
  *content_type = static_cast<VideoContentType>(
      (static_cast<uint8_t>(*content_type) & ~kSimulcastBitsMask) |
      ((simulcast_id << kSimulcastShift) & kSimulcastBitsMask));
  return true;
}
// Extracts the 3-bit experiment group id (0 = no experiment).
uint8_t GetExperimentId(const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kExperimentBitsMask) >>
         kExperimentShift;
}
// Extracts the 2-bit simulcast stream id / spatial layer (0 = unset).
uint8_t GetSimulcastId(const VideoContentType& content_type) {
  return (static_cast<uint8_t>(content_type) & kSimulcastBitsMask) >>
         kSimulcastShift;
}
bool IsScreenshare(const VideoContentType& content_type) {
return (static_cast<uint8_t>(content_type) & kScreenshareBitsMask) > 0;
}
// A byte is a valid VideoContentType iff only its low 6 (kTotalBitsSize)
// bits are populated; the two reserved high bits must be zero.
bool IsValidContentType(uint8_t value) {
  return (value >> kTotalBitsSize) == 0;
}
// Human-readable label for stats/logging: "screen" or "realtime".
const char* ToString(const VideoContentType& content_type) {
  if (IsScreenshare(content_type)) {
    return "screen";
  }
  return "realtime";
}
} // namespace videocontenttypehelpers
} // namespace webrtc

View File

@ -0,0 +1,39 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_VIDEO_VIDEO_CONTENT_TYPE_H_
#define API_VIDEO_VIDEO_CONTENT_TYPE_H_
#include <stdint.h>
namespace webrtc {
// Single-byte content-type tag sent over the network; the byte also packs an
// experiment id and a simulcast id (see video_content_type.cc for the bit
// layout), so values other than the two named enumerators are possible.
enum class VideoContentType : uint8_t {
  UNSPECIFIED = 0,
  SCREENSHARE = 1,
};
namespace videocontenttypehelpers {
// Accessors/mutators for the bit fields packed into VideoContentType.
// The setters return false when the id does not fit in its field.
bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id);
bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id);
uint8_t GetExperimentId(const VideoContentType& content_type);
uint8_t GetSimulcastId(const VideoContentType& content_type);
bool IsScreenshare(const VideoContentType& content_type);
bool IsValidContentType(uint8_t value);
const char* ToString(const VideoContentType& content_type);
}  // namespace videocontenttypehelpers

View File

@ -0,0 +1,26 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_VIDEO_VIDEO_ROTATION_H_
#define API_VIDEO_VIDEO_ROTATION_H_
namespace webrtc {
// enum for clockwise rotation.
// enum for clockwise rotation. Enumerator values are the rotation in degrees
// so they can be used directly in arithmetic.
enum VideoRotation {
  kVideoRotation_0 = 0,
  kVideoRotation_90 = 90,
  kVideoRotation_180 = 180,
  kVideoRotation_270 = 270
};
} // namespace webrtc
#endif // API_VIDEO_VIDEO_ROTATION_H_

View File

@ -0,0 +1,92 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video/video_timing.h"
#include "api/array_view.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
// Returns |time_ms - base_ms| saturated to the uint16_t range for the
// video-timing RTP header extension. A negative delta indicates a caller
// bug (time before base); it is logged and then saturates to 0.
uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) {
  if (time_ms < base_ms) {
    RTC_DLOG(LS_ERROR) << "Delta " << (time_ms - base_ms)
                       << "ms expected to be positive";
  }
  return rtc::saturated_cast<uint16_t>(time_ms - base_ms);
}
// All timestamps start at -1 ("not set"); flags start at kNotTriggered so a
// default-constructed info is valid but not scheduled for transmission.
TimingFrameInfo::TimingFrameInfo()
    : rtp_timestamp(0),
      capture_time_ms(-1),
      encode_start_ms(-1),
      encode_finish_ms(-1),
      packetization_finish_ms(-1),
      pacer_exit_ms(-1),
      network_timestamp_ms(-1),
      network2_timestamp_ms(-1),
      receive_start_ms(-1),
      receive_finish_ms(-1),
      decode_start_ms(-1),
      decode_finish_ms(-1),
      render_time_ms(-1),
      flags(VideoSendTiming::kNotTriggered) {}
// Capture-to-decode delay in ms, or -1 when the sender clock has not been
// synchronized (signalled by a negative capture time).
int64_t TimingFrameInfo::EndToEndDelay() const {
  if (capture_time_ms < 0) {
    return -1;
  }
  return decode_finish_ms - capture_time_ms;
}
bool TimingFrameInfo::IsLongerThan(const TimingFrameInfo& other) const {
int64_t other_delay = other.EndToEndDelay();
return other_delay == -1 || EndToEndDelay() > other_delay;
}
// Ordering by end-to-end delay, expressed via IsLongerThan so the
// unsynchronized-clock tie-breaking stays in one place.
bool TimingFrameInfo::operator<(const TimingFrameInfo& other) const {
  return other.IsLongerThan(*this);
}
bool TimingFrameInfo::operator<=(const TimingFrameInfo& other) const {
  return !IsLongerThan(other);
}
// Flag predicates. All trigger checks are gated on the info being valid.
bool TimingFrameInfo::IsOutlier() const {
  return !IsInvalid() && (flags & VideoSendTiming::kTriggeredBySize);
}
bool TimingFrameInfo::IsTimerTriggered() const {
  return !IsInvalid() && (flags & VideoSendTiming::kTriggeredByTimer);
}
bool TimingFrameInfo::IsInvalid() const {
  return flags == VideoSendTiming::kInvalid;
}
// Serializes the info as a fixed-order comma-separated line for GetStats();
// returns an empty string for invalid info. The field order here is part of
// the stats format consumed downstream — do not reorder.
std::string TimingFrameInfo::ToString() const {
  if (IsInvalid()) {
    return "";
  }
  char buf[1024];
  rtc::SimpleStringBuilder sb(buf);
  sb << rtp_timestamp << ',' << capture_time_ms << ',' << encode_start_ms << ','
     << encode_finish_ms << ',' << packetization_finish_ms << ','
     << pacer_exit_ms << ',' << network_timestamp_ms << ','
     << network2_timestamp_ms << ',' << receive_start_ms << ','
     << receive_finish_ms << ',' << decode_start_ms << ',' << decode_finish_ms
     << ',' << render_time_ms << ',' << IsOutlier() << ','
     << IsTimerTriggered();
  return sb.str();
}
} // namespace webrtc

View File

@ -0,0 +1,129 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_VIDEO_VIDEO_TIMING_H_
#define API_VIDEO_VIDEO_TIMING_H_
#include <stdint.h>
#include <limits>
#include <string>
namespace webrtc {
// Video timing timestamps in ms counted from capture_time_ms of a frame.
// This structure represents data sent in video-timing RTP header extension.
struct VideoSendTiming {
  enum TimingFrameFlags : uint8_t {
    kNotTriggered = 0,  // Timing info valid, but not to be transmitted.
                        // Used on send-side only.
    kTriggeredByTimer = 1 << 0,  // Frame marked for tracing by periodic timer.
    kTriggeredBySize = 1 << 1,   // Frame marked for tracing due to size.
    kInvalid = std::numeric_limits<uint8_t>::max()  // Invalid, ignore!
  };
  // Returns |time_ms - base_ms| capped at max 16-bit value.
  // Used to fill this data structure as per
  // https://webrtc.org/experiments/rtp-hdrext/video-timing/ extension stores
  // 16-bit deltas of timestamps from packet capture time.
  static uint16_t GetDeltaCappedMs(int64_t base_ms, int64_t time_ms);
  // All deltas are in milliseconds relative to the frame's capture time.
  uint16_t encode_start_delta_ms;
  uint16_t encode_finish_delta_ms;
  uint16_t packetization_finish_delta_ms;
  uint16_t pacer_exit_delta_ms;
  uint16_t network_timestamp_delta_ms;
  uint16_t network2_timestamp_delta_ms;
  // Combination of TimingFrameFlags values.
  uint8_t flags;
};
// Used to report precise timings of a 'timing frames'. Contains all important
// timestamps for a lifetime of that specific frame. Reported as a string via
// GetStats(). Only frame which took the longest between two GetStats calls is
// reported.
struct TimingFrameInfo {
  TimingFrameInfo();
  // Returns end-to-end delay of a frame, if sender and receiver timestamps are
  // synchronized, -1 otherwise.
  int64_t EndToEndDelay() const;
  // Returns true if current frame took longer to process than |other| frame.
  // If other frame's clocks are not synchronized, current frame is always
  // preferred.
  bool IsLongerThan(const TimingFrameInfo& other) const;
  // Returns true if flags are set to indicate this frame was marked for tracing
  // due to the size being outside some limit.
  bool IsOutlier() const;
  // Returns true if flags are set to indicate this frame was marked for tracing
  // due to cyclic timer.
  bool IsTimerTriggered() const;
  // Returns true if the timing data is marked as invalid, in which case it
  // should be ignored.
  bool IsInvalid() const;
  std::string ToString() const;
  bool operator<(const TimingFrameInfo& other) const;
  bool operator<=(const TimingFrameInfo& other) const;
  uint32_t rtp_timestamp;  // Identifier of a frame.
  // All timestamps below are in local monotonous clock of a receiver.
  // If sender clock is not yet estimated, sender timestamps
  // (capture_time_ms ... pacer_exit_ms) are negative values, still
  // relatively correct.
  int64_t capture_time_ms;          // Capture time of a frame.
  int64_t encode_start_ms;          // Encode start time.
  int64_t encode_finish_ms;         // Encode completion time.
  int64_t packetization_finish_ms;  // Time when frame was passed to pacer.
  int64_t pacer_exit_ms;  // Time when last packet was pushed out of pacer.
  // Two in-network RTP processor timestamps: meaning is application specific.
  int64_t network_timestamp_ms;
  int64_t network2_timestamp_ms;
  int64_t receive_start_ms;   // First received packet time.
  int64_t receive_finish_ms;  // Last received packet time.
  int64_t decode_start_ms;    // Decode start time.
  int64_t decode_finish_ms;   // Decode completion time.
  int64_t render_time_ms;  // Proposed render time to ensure smooth playback.
  uint8_t flags;  // Flags indicating validity and/or why tracing was triggered.
};
// Minimum and maximum playout delay values from capture to render.
// These are best effort values.
//
// A value < 0 indicates no change from previous valid value.
//
// min = max = 0 indicates that the receiver should try and render
// frame as soon as possible.
//
// min = x, max = y indicates that the receiver is free to adapt
// in the range (x, y) based on network jitter.
struct VideoPlayoutDelay {
  VideoPlayoutDelay() = default;
  VideoPlayoutDelay(int min_ms, int max_ms) : min_ms(min_ms), max_ms(max_ms) {}
  // -1 means "no change from the previous valid value" (see the comment block
  // above this struct).
  int min_ms = -1;
  int max_ms = -1;
  bool operator==(const VideoPlayoutDelay& rhs) const {
    return min_ms == rhs.min_ms && max_ms == rhs.max_ms;
  }
};
// TODO(bugs.webrtc.org/7660): Old name, delete after downstream use is updated.
using PlayoutDelay = VideoPlayoutDelay;
} // namespace webrtc
#endif // API_VIDEO_VIDEO_TIMING_H_

View File

@ -0,0 +1,53 @@
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../webrtc.gni")
# Umbrella target for the audio utility helpers.
group("utility") {
  deps = [ ":audio_frame_operations" ]
}
# AudioFrame mixing/mute/up-down-mix helpers plus channel-mixing matrices.
rtc_library("audio_frame_operations") {
  visibility = [ "*" ]
  sources = [
    "audio_frame_operations.cc",
    "audio_frame_operations.h",
    "channel_mixer.cc",
    "channel_mixer.h",
    "channel_mixing_matrix.cc",
    "channel_mixing_matrix.h",
  ]
  deps = [
    "../../api/audio:audio_frame_api",
    "../../common_audio",
    "../../rtc_base:checks",
    "../../rtc_base:deprecation",
    "../../rtc_base:rtc_base_approved",
    "../../system_wrappers:field_trial",
  ]
}
# Unit tests; only built when the tree-wide rtc_include_tests flag is set.
if (rtc_include_tests) {
  rtc_library("utility_tests") {
    testonly = true
    sources = [
      "audio_frame_operations_unittest.cc",
      "channel_mixer_unittest.cc",
      "channel_mixing_matrix_unittest.cc",
    ]
    deps = [
      ":audio_frame_operations",
      "../../api/audio:audio_frame_api",
      "../../rtc_base:checks",
      "../../rtc_base:rtc_base_approved",
      "../../test:field_trial",
      "../../test:test_support",
      "//testing/gtest",
    ]
  }
}

View File

@ -0,0 +1,294 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "audio/utility/audio_frame_operations.h"
#include <string.h>
#include <algorithm>
#include <cstdint>
#include <utility>
#include "common_audio/include/audio_util.h"
#include "rtc_base/checks.h"
#include "rtc_base/numerics/safe_conversions.h"
namespace webrtc {
namespace {
// Length of the linear fade applied when (un)muting, in samples per channel:
// 2.7ms @ 48kHz, 4ms @ 32kHz, 8ms @ 16kHz.
const size_t kMuteFadeFrames = 128;
// Per-sample gain step so the fade spans exactly kMuteFadeFrames samples.
const float kMuteFadeInc = 1.0f / kMuteFadeFrames;
}  // namespace
// Mixes |frame_to_add| into |result_frame| sample-by-sample with int16
// saturation. Channel counts must match. VAD activity and speech type are
// merged conservatively (active wins; mismatched speech types become
// kUndefined). If |result_frame| is muted or empty, the input is copied
// instead of summed.
void AudioFrameOperations::Add(const AudioFrame& frame_to_add,
                               AudioFrame* result_frame) {
  // Sanity check.
  RTC_DCHECK(result_frame);
  RTC_DCHECK_GT(result_frame->num_channels_, 0);
  RTC_DCHECK_EQ(result_frame->num_channels_, frame_to_add.num_channels_);
  bool no_previous_data = result_frame->muted();
  if (result_frame->samples_per_channel_ != frame_to_add.samples_per_channel_) {
    // Special case we have no data to start with.
    RTC_DCHECK_EQ(result_frame->samples_per_channel_, 0);
    result_frame->samples_per_channel_ = frame_to_add.samples_per_channel_;
    no_previous_data = true;
  }
  // Merge VAD: any active input makes the mix active; otherwise unknown
  // propagates; otherwise the existing value stands.
  if (result_frame->vad_activity_ == AudioFrame::kVadActive ||
      frame_to_add.vad_activity_ == AudioFrame::kVadActive) {
    result_frame->vad_activity_ = AudioFrame::kVadActive;
  } else if (result_frame->vad_activity_ == AudioFrame::kVadUnknown ||
             frame_to_add.vad_activity_ == AudioFrame::kVadUnknown) {
    result_frame->vad_activity_ = AudioFrame::kVadUnknown;
  }
  if (result_frame->speech_type_ != frame_to_add.speech_type_)
    result_frame->speech_type_ = AudioFrame::kUndefined;
  if (!frame_to_add.muted()) {
    const int16_t* in_data = frame_to_add.data();
    int16_t* out_data = result_frame->mutable_data();
    size_t length =
        frame_to_add.samples_per_channel_ * frame_to_add.num_channels_;
    if (no_previous_data) {
      std::copy(in_data, in_data + length, out_data);
    } else {
      for (size_t i = 0; i < length; i++) {
        // Sum in 32 bits, then saturate back to int16 to avoid wrap-around.
        const int32_t wrap_guard = static_cast<int32_t>(out_data[i]) +
                                   static_cast<int32_t>(in_data[i]);
        out_data[i] = rtc::saturated_cast<int16_t>(wrap_guard);
      }
    }
  }
}
// Duplicates the mono channel into two interleaved channels in place.
// Returns 0 on success, -1 if |frame| is not mono.
int AudioFrameOperations::MonoToStereo(AudioFrame* frame) {
  if (frame->num_channels_ == 1) {
    UpmixChannels(2, frame);
    return 0;
  }
  return -1;
}
// Downmixes a stereo frame to mono in place. Returns 0 on success, -1 if
// |frame| is not stereo or the downmix did not take effect.
int AudioFrameOperations::StereoToMono(AudioFrame* frame) {
  if (frame->num_channels_ != 2) {
    return -1;
  }
  DownmixChannels(1, frame);
  // DownmixChannels updates num_channels_ when the conversion succeeds.
  if (frame->num_channels_ == 1) {
    return 0;
  }
  return -1;
}
// Downmixes 4-channel interleaved audio to stereo: channels (0,1) average
// into left and (2,3) into right, truncating with an arithmetic shift.
// |src_audio| and |dst_audio| may alias (in-place shrink is safe because the
// output index never passes the input index).
void AudioFrameOperations::QuadToStereo(const int16_t* src_audio,
                                        size_t samples_per_channel,
                                        int16_t* dst_audio) {
  for (size_t i = 0; i < samples_per_channel; i++) {
    const int16_t* quad = &src_audio[4 * i];
    dst_audio[2 * i] = (static_cast<int32_t>(quad[0]) + quad[1]) >> 1;
    dst_audio[2 * i + 1] = (static_cast<int32_t>(quad[2]) + quad[3]) >> 1;
  }
}
// In-place frame variant: downmixes a 4-channel frame to stereo and updates
// num_channels_. Returns 0 on success, -1 if |frame| is not 4-channel.
// Muted frames only have their channel count adjusted.
int AudioFrameOperations::QuadToStereo(AudioFrame* frame) {
  if (frame->num_channels_ != 4) {
    return -1;
  }
  RTC_DCHECK_LE(frame->samples_per_channel_ * 4,
                AudioFrame::kMaxDataSizeSamples);
  if (!frame->muted()) {
    QuadToStereo(frame->data(), frame->samples_per_channel_,
                 frame->mutable_data());
  }
  frame->num_channels_ = 2;
  return 0;
}
// Downmixes interleaved audio. Supported conversions: any multi-channel to
// mono, and quad to stereo; any other combination hits RTC_NOTREACHED and is
// a no-op in release builds.
void AudioFrameOperations::DownmixChannels(const int16_t* src_audio,
                                           size_t src_channels,
                                           size_t samples_per_channel,
                                           size_t dst_channels,
                                           int16_t* dst_audio) {
  if (src_channels > 1 && dst_channels == 1) {
    DownmixInterleavedToMono(src_audio, samples_per_channel, src_channels,
                             dst_audio);
    return;
  } else if (src_channels == 4 && dst_channels == 2) {
    QuadToStereo(src_audio, samples_per_channel, dst_audio);
    return;
  }
  RTC_NOTREACHED() << "src_channels: " << src_channels
                   << ", dst_channels: " << dst_channels;
}
// In-place frame variant of DownmixChannels. Supported: multi-channel to
// mono and quad to stereo; other combinations hit RTC_NOTREACHED and leave
// the frame unchanged in release builds. Muted frames only have their
// channel count updated.
void AudioFrameOperations::DownmixChannels(size_t dst_channels,
                                           AudioFrame* frame) {
  RTC_DCHECK_LE(frame->samples_per_channel_ * frame->num_channels_,
                AudioFrame::kMaxDataSizeSamples);
  if (frame->num_channels_ > 1 && dst_channels == 1) {
    if (!frame->muted()) {
      DownmixInterleavedToMono(frame->data(), frame->samples_per_channel_,
                               frame->num_channels_, frame->mutable_data());
    }
    frame->num_channels_ = 1;
  } else if (frame->num_channels_ == 4 && dst_channels == 2) {
    int err = QuadToStereo(frame);
    RTC_DCHECK_EQ(err, 0);
  } else {
    RTC_NOTREACHED() << "src_channels: " << frame->num_channels_
                     << ", dst_channels: " << dst_channels;
  }
}
// Duplicates a mono frame's samples across |target_number_of_channels|
// interleaved channels, in place. Silently returns (after DCHECKing in debug
// builds) if the frame is not mono or the result would not fit in the
// frame's fixed-size buffer. Muted frames only get their channel count
// updated.
void AudioFrameOperations::UpmixChannels(size_t target_number_of_channels,
                                         AudioFrame* frame) {
  RTC_DCHECK_EQ(frame->num_channels_, 1);
  RTC_DCHECK_LE(frame->samples_per_channel_ * target_number_of_channels,
                AudioFrame::kMaxDataSizeSamples);
  if (frame->num_channels_ != 1 ||
      frame->samples_per_channel_ * target_number_of_channels >
          AudioFrame::kMaxDataSizeSamples) {
    return;
  }
  if (!frame->muted()) {
    // Up-mixing done in place. Going backwards through the frame ensure nothing
    // is irrevocably overwritten.
    for (int i = frame->samples_per_channel_ - 1; i >= 0; i--) {
      for (size_t j = 0; j < target_number_of_channels; ++j) {
        frame->mutable_data()[target_number_of_channels * i + j] =
            frame->data()[i];
      }
    }
  }
  frame->num_channels_ = target_number_of_channels;
}
// Swaps the left and right channels of a stereo frame in place. Non-stereo
// and muted frames are left untouched.
void AudioFrameOperations::SwapStereoChannels(AudioFrame* frame) {
  RTC_DCHECK(frame);
  if (frame->num_channels_ != 2 || frame->muted()) {
    return;
  }
  int16_t* samples = frame->mutable_data();
  const size_t total_samples = frame->samples_per_channel_ * 2;
  for (size_t left = 0; left < total_samples; left += 2) {
    std::swap(samples[left], samples[left + 1]);
  }
}
// Mutes |frame| according to the previous/current mute states:
//  - both unmuted: no-op;
//  - both muted: zero the whole frame via AudioFrame::Mute();
//  - transition: apply a linear fade-in (unmuting) over the first samples or
//    fade-out (muting) over the last samples, kMuteFadeFrames long (shorter
//    if the frame is shorter).
void AudioFrameOperations::Mute(AudioFrame* frame,
                                bool previous_frame_muted,
                                bool current_frame_muted) {
  RTC_DCHECK(frame);
  if (!previous_frame_muted && !current_frame_muted) {
    // Not muted, don't touch.
  } else if (previous_frame_muted && current_frame_muted) {
    // Frame fully muted.
    size_t total_samples = frame->samples_per_channel_ * frame->num_channels_;
    RTC_DCHECK_GE(AudioFrame::kMaxDataSizeSamples, total_samples);
    frame->Mute();
  } else {
    // Fade is a no-op on a muted frame.
    if (frame->muted()) {
      return;
    }
    // Limit number of samples to fade, if frame isn't long enough.
    size_t count = kMuteFadeFrames;
    float inc = kMuteFadeInc;
    if (frame->samples_per_channel_ < kMuteFadeFrames) {
      count = frame->samples_per_channel_;
      if (count > 0) {
        inc = 1.0f / count;
      }
    }
    size_t start = 0;
    size_t end = count;
    float start_g = 0.0f;
    if (current_frame_muted) {
      // Fade out the last |count| samples of frame.
      RTC_DCHECK(!previous_frame_muted);
      start = frame->samples_per_channel_ - count;
      end = frame->samples_per_channel_;
      start_g = 1.0f;
      inc = -inc;
    } else {
      // Fade in the first |count| samples of frame.
      RTC_DCHECK(previous_frame_muted);
    }
    // Perform fade. Each channel is faded independently over the interleaved
    // data; the gain is stepped before each multiply, so it ramps from
    // start_g +/- inc to the opposite endpoint.
    int16_t* frame_data = frame->mutable_data();
    size_t channels = frame->num_channels_;
    for (size_t j = 0; j < channels; ++j) {
      float g = start_g;
      for (size_t i = start * channels; i < end * channels; i += channels) {
        g += inc;
        frame_data[i + j] *= g;
      }
    }
  }
}
// Unconditionally zeroes |frame| by delegating to the three-argument overload
// with both mute flags set, which takes the "frame fully muted" path.
void AudioFrameOperations::Mute(AudioFrame* frame) {
  Mute(frame, /*previous_frame_muted=*/true, /*current_frame_muted=*/true);
}
// Attenuates every sample of |frame| by 6 dB (halves the amplitude) in place.
// Muted frames are left untouched. The arithmetic right shift is kept (rather
// than division) to match the original rounding toward negative infinity.
void AudioFrameOperations::ApplyHalfGain(AudioFrame* frame) {
  RTC_DCHECK(frame);
  RTC_DCHECK_GT(frame->num_channels_, 0);
  if (frame->muted() || frame->num_channels_ < 1) {
    return;
  }
  const size_t total_samples =
      frame->samples_per_channel_ * frame->num_channels_;
  int16_t* sample = frame->mutable_data();
  for (int16_t* const end = sample + total_samples; sample != end; ++sample) {
    *sample = *sample >> 1;
  }
}
// Scales the two channels of a stereo |frame| independently: the left channel
// by |left| and the right channel by |right|. Returns -1 if the frame is not
// stereo; returns 0 otherwise (including the muted no-op case).
int AudioFrameOperations::Scale(float left, float right, AudioFrame* frame) {
  if (frame->num_channels_ != 2) {
    return -1;
  }
  if (frame->muted()) {
    return 0;
  }
  int16_t* samples = frame->mutable_data();
  for (size_t i = 0; i < frame->samples_per_channel_; ++i) {
    const size_t left_ix = 2 * i;  // Interleaved layout: L at even indices.
    samples[left_ix] = static_cast<int16_t>(left * samples[left_ix]);
    samples[left_ix + 1] = static_cast<int16_t>(right * samples[left_ix + 1]);
  }
  return 0;
}
// Multiplies every sample of |frame| by |scale|, saturating the result at the
// int16_t range instead of wrapping. Muted frames are a no-op. Always
// returns 0 (kept for interface compatibility with Scale()).
int AudioFrameOperations::ScaleWithSat(float scale, AudioFrame* frame) {
  if (frame->muted()) {
    return 0;
  }
  const size_t total_samples =
      frame->samples_per_channel_ * frame->num_channels_;
  int16_t* samples = frame->mutable_data();
  for (size_t i = 0; i < total_samples; ++i) {
    samples[i] = rtc::saturated_cast<int16_t>(scale * samples[i]);
  }
  return 0;
}
} // namespace webrtc

View File

@ -0,0 +1,105 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef AUDIO_UTILITY_AUDIO_FRAME_OPERATIONS_H_
#define AUDIO_UTILITY_AUDIO_FRAME_OPERATIONS_H_
#include <stddef.h>
#include <stdint.h>
#include "api/audio/audio_frame.h"
#include "rtc_base/deprecation.h"
namespace webrtc {
// TODO(andrew): consolidate this with utility.h and audio_frame_manipulator.h.
// Change reference parameters to pointers. Consider using a namespace rather
// than a class.
// Stateless helpers that operate in place on AudioFrame buffers: channel
// up/down-mixing, channel swapping, muting with fade in/out, and gain
// scaling. All members are static; the class is never instantiated.
class AudioFrameOperations {
 public:
  // Add samples in |frame_to_add| with samples in |result_frame|
  // putting the results in |result_frame|. The fields
  // |vad_activity_| and |speech_type_| of the result frame are
  // updated. If |result_frame| is empty (|samples_per_channel_|==0),
  // the samples in |frame_to_add| are added to it. The number of
  // channels and number of samples per channel must match except when
  // |result_frame| is empty.
  static void Add(const AudioFrame& frame_to_add, AudioFrame* result_frame);

  // |frame.num_channels_| will be updated. This version checks for sufficient
  // buffer size and that |num_channels_| is mono. Use UpmixChannels
  // instead. TODO(bugs.webrtc.org/8649): remove.
  RTC_DEPRECATED static int MonoToStereo(AudioFrame* frame);

  // |frame.num_channels_| will be updated. This version checks that
  // |num_channels_| is stereo. Use DownmixChannels
  // instead. TODO(bugs.webrtc.org/8649): remove.
  RTC_DEPRECATED static int StereoToMono(AudioFrame* frame);

  // Downmixes 4 channels |src_audio| to stereo |dst_audio|. This is an in-place
  // operation, meaning |src_audio| and |dst_audio| may point to the same
  // buffer.
  static void QuadToStereo(const int16_t* src_audio,
                           size_t samples_per_channel,
                           int16_t* dst_audio);

  // |frame.num_channels_| will be updated. This version checks that
  // |num_channels_| is 4 channels.
  static int QuadToStereo(AudioFrame* frame);

  // Downmixes |src_channels| |src_audio| to |dst_channels| |dst_audio|.
  // This is an in-place operation, meaning |src_audio| and |dst_audio|
  // may point to the same buffer. Supported channel combinations are
  // Stereo to Mono, Quad to Mono, and Quad to Stereo.
  static void DownmixChannels(const int16_t* src_audio,
                              size_t src_channels,
                              size_t samples_per_channel,
                              size_t dst_channels,
                              int16_t* dst_audio);

  // |frame.num_channels_| will be updated. This version checks that
  // |num_channels_| and |dst_channels| are valid and performs relevant downmix.
  // Supported channel combinations are N channels to Mono, and Quad to Stereo.
  static void DownmixChannels(size_t dst_channels, AudioFrame* frame);

  // |frame.num_channels_| will be updated. This version checks that
  // |num_channels_| and |dst_channels| are valid and performs relevant
  // downmix. Supported channel combinations are Mono to N
  // channels. The single channel is replicated.
  static void UpmixChannels(size_t target_number_of_channels,
                            AudioFrame* frame);

  // Swap the left and right channels of |frame|. Fails silently if |frame| is
  // not stereo.
  static void SwapStereoChannels(AudioFrame* frame);

  // Conditionally zero out contents of |frame| for implementing audio mute:
  //  |previous_frame_muted| &&  |current_frame_muted| - Zero out whole frame.
  //  |previous_frame_muted| && !|current_frame_muted| - Fade-in at frame start.
  // !|previous_frame_muted| &&  |current_frame_muted| - Fade-out at frame end.
  // !|previous_frame_muted| && !|current_frame_muted| - Leave frame untouched.
  static void Mute(AudioFrame* frame,
                   bool previous_frame_muted,
                   bool current_frame_muted);

  // Zero out contents of frame.
  static void Mute(AudioFrame* frame);

  // Halve samples in |frame| (attenuate by 6 dB).
  static void ApplyHalfGain(AudioFrame* frame);

  // Scales the left channel by |left| and the right channel by |right|.
  // Returns -1 if |frame| is not stereo, 0 otherwise.
  static int Scale(float left, float right, AudioFrame* frame);

  // Multiplies every sample by |scale|, saturating at the int16_t range.
  // Returns 0.
  static int ScaleWithSat(float scale, AudioFrame* frame);
};
} // namespace webrtc
#endif // AUDIO_UTILITY_AUDIO_FRAME_OPERATIONS_H_

View File

@ -1,596 +0,0 @@
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("//build/config/crypto.gni")
import("//build/config/ui.gni")
import("../build/webrtc.gni")

# Common compile settings for rtc_base: jsoncpp include paths plus SSL and
# logging feature defines.
config("rtc_base_config") {
  include_dirs = [
    "//third_party/jsoncpp/overrides/include",
    "//third_party/jsoncpp/source/include",
  ]
  defines = [
    "FEATURE_ENABLE_SSL",
    "LOGGING=1",
  ]
  if (is_posix) {
    # TODO(henrike): issue 3307, make rtc_base build without disabling
    # these flags.
    cflags_cc = [ "-Wno-non-virtual-dtor" ]
  }
}

# Extra defines applied when rtc_base is consumed from a Chromium build.
config("rtc_base_chromium_config") {
  defines = [ "NO_MAIN_THREAD_WRAPPING" ]
}

# Defines selecting the OpenSSL/BoringSSL code paths.
config("openssl_config") {
  defines = [
    "SSL_USE_OPENSSL",
    "HAVE_OPENSSL_SSL_H",
  ]
}

# System frameworks required when linking rtc_base on iOS.
config("ios_config") {
  libs = [
    "CFNetwork.framework",

    #"Foundation.framework", # Already included in //build/config:default_libs.
    "Security.framework",
    "SystemConfiguration.framework",

    #"UIKit.framework", # Already included in //build/config:default_libs.
  ]
}

# System frameworks required when linking rtc_base on Mac.
config("mac_config") {
  libs = [
    "Cocoa.framework",

    #"Foundation.framework", # Already included in //build/config:default_libs.
    #"IOKit.framework", # Already included in //build/config:default_libs.
    #"Security.framework", # Already included in //build/config:default_libs.
    "SystemConfiguration.framework",
  ]
}

config("mac_x86_config") {
  libs = [
    #"Carbon.framework", # Already included in //build/config:default_libs.
  ]
}

if (is_linux && !build_with_chromium) {
  # Provides the same functionality as the //crypto:platform target, which
  # WebRTC cannot use as we don't sync src/crypto from Chromium.
  group("linux_system_ssl") {
    if (use_openssl) {
      deps = [
        "//third_party/boringssl",
      ]
    }
  }
}

if (rtc_build_ssl == 0) {
  # Points the build at an externally provided SSL library instead of the
  # bundled one.
  config("external_ssl_library") {
    assert(rtc_ssl_root != "",
           "You must specify rtc_ssl_root when rtc_build_ssl==0.")
    include_dirs = [ rtc_ssl_root ]
  }
}

# The subset of rtc_base approved for use outside of libjingle.
static_library("rtc_base_approved") {
  configs += [ "..:common_config" ]
  public_configs = [ "..:common_inherited_config" ]
  sources = [
    "array_view.h",
    "atomicops.h",
    "bitbuffer.cc",
    "bitbuffer.h",
    "buffer.cc",
    "buffer.h",
    "bufferqueue.cc",
    "bufferqueue.h",
    "bytebuffer.cc",
    "bytebuffer.h",
    "byteorder.h",
    "checks.cc",
    "checks.h",
    "criticalsection.cc",
    "criticalsection.h",
    "event.cc",
    "event.h",
    "event_tracer.cc",
    "event_tracer.h",
    "exp_filter.cc",
    "exp_filter.h",
    "maybe.h",
    "md5.cc",
    "md5.h",
    "md5digest.cc",
    "md5digest.h",
    "platform_file.cc",
    "platform_file.h",
    "platform_thread.cc",
    "platform_thread.h",
    "safe_conversions.h",
    "safe_conversions_impl.h",
    "scoped_ptr.h",
    "stringencode.cc",
    "stringencode.h",
    "stringutils.cc",
    "stringutils.h",
    "systeminfo.cc",
    "systeminfo.h",
    "template_util.h",
    "thread_annotations.h",
    "thread_checker.h",
    "thread_checker_impl.cc",
    "thread_checker_impl.h",
    "timeutils.cc",
    "timeutils.h",
    "trace_event.h",
  ]
  if (!build_with_chromium) {
    # Chromium provides its own versions of these basics; only build them in
    # the standalone configuration.
    sources += [
      "basictypes.h",
      "constructormagic.h",
      "logging.cc",
      "logging.h",
    ]
  }
}

# The full rtc_base library: networking, sockets, SSL adapters, threading and
# platform glue layered on top of :rtc_base_approved.
static_library("rtc_base") {
  cflags = []
  cflags_cc = []
  libs = []
  deps = [
    ":rtc_base_approved",
  ]
  configs += [
    "..:common_config",
    ":rtc_base_config",
  ]
  public_configs = [
    "..:common_inherited_config",
    ":rtc_base_config",
  ]
  defines = [ "LOGGING=1" ]
  sources = [
    "arraysize.h",
    "asyncfile.cc",
    "asyncfile.h",
    "asyncinvoker-inl.h",
    "asyncinvoker.cc",
    "asyncinvoker.h",
    "asyncpacketsocket.cc",
    "asyncpacketsocket.h",
    "asyncresolverinterface.cc",
    "asyncresolverinterface.h",
    "asyncsocket.cc",
    "asyncsocket.h",
    "asynctcpsocket.cc",
    "asynctcpsocket.h",
    "asyncudpsocket.cc",
    "asyncudpsocket.h",
    "autodetectproxy.cc",
    "autodetectproxy.h",
    "base64.cc",
    "base64.h",
    "basicdefs.h",
    "common.cc",
    "common.h",
    "crc32.cc",
    "crc32.h",
    "cryptstring.cc",
    "cryptstring.h",
    "diskcache.cc",
    "diskcache.h",
    "filerotatingstream.cc",
    "filerotatingstream.h",
    "fileutils.cc",
    "fileutils.h",
    "firewallsocketserver.cc",
    "firewallsocketserver.h",
    "flags.cc",
    "flags.h",
    "format_macros.h",
    "gunit_prod.h",
    "helpers.cc",
    "helpers.h",
    "httpbase.cc",
    "httpbase.h",
    "httpclient.cc",
    "httpclient.h",
    "httpcommon-inl.h",
    "httpcommon.cc",
    "httpcommon.h",
    "httprequest.cc",
    "httprequest.h",
    "iosfilesystem.mm",
    "ipaddress.cc",
    "ipaddress.h",
    "linked_ptr.h",
    "mathutils.h",
    "messagedigest.cc",
    "messagedigest.h",
    "messagehandler.cc",
    "messagehandler.h",
    "messagequeue.cc",
    "messagequeue.h",
    "nethelpers.cc",
    "nethelpers.h",
    "network.cc",
    "network.h",
    "networkmonitor.cc",
    "networkmonitor.h",
    "nullsocketserver.h",
    "pathutils.cc",
    "pathutils.h",
    "physicalsocketserver.cc",
    "physicalsocketserver.h",
    "proxydetect.cc",
    "proxydetect.h",
    "proxyinfo.cc",
    "proxyinfo.h",
    "ratelimiter.cc",
    "ratelimiter.h",
    "ratetracker.cc",
    "ratetracker.h",
    "rtccertificate.cc",
    "rtccertificate.h",
    "scoped_autorelease_pool.h",
    "scoped_autorelease_pool.mm",
    "sha1.cc",
    "sha1.h",
    "sha1digest.cc",
    "sha1digest.h",
    "signalthread.cc",
    "signalthread.h",
    "sigslot.cc",
    "sigslot.h",
    "sigslotrepeater.h",
    "socket.h",
    "socketadapters.cc",
    "socketadapters.h",
    "socketaddress.cc",
    "socketaddress.h",
    "socketaddresspair.cc",
    "socketaddresspair.h",
    "socketfactory.h",
    "socketpool.cc",
    "socketpool.h",
    "socketserver.h",
    "socketstream.cc",
    "socketstream.h",
    "ssladapter.cc",
    "ssladapter.h",
    "sslfingerprint.cc",
    "sslfingerprint.h",
    "sslidentity.cc",
    "sslidentity.h",
    "sslsocketfactory.cc",
    "sslsocketfactory.h",
    "sslstreamadapter.cc",
    "sslstreamadapter.h",
    "sslstreamadapterhelper.cc",
    "sslstreamadapterhelper.h",
    "stream.cc",
    "stream.h",
    "task.cc",
    "task.h",
    "taskparent.cc",
    "taskparent.h",
    "taskrunner.cc",
    "taskrunner.h",
    "thread.cc",
    "thread.h",
    "timing.cc",
    "timing.h",
    "urlencode.cc",
    "urlencode.h",
    "worker.cc",
    "worker.h",
  ]
  if (is_posix) {
    sources += [
      "unixfilesystem.cc",
      "unixfilesystem.h",
    ]
  }
  if (build_with_chromium) {
    # Chromium builds override logging and (on Windows) socket init.
    sources += [
      "../../webrtc_overrides/webrtc/base/logging.cc",
      "../../webrtc_overrides/webrtc/base/logging.h",
    ]
    deps += [ "..:webrtc_common" ]
    if (is_win) {
      sources += [ "../../webrtc_overrides/webrtc/base/win32socketinit.cc" ]
    }
    include_dirs = [
      "../../webrtc_overrides",
      "../../boringssl/src/include",
    ]
    public_configs += [ ":rtc_base_chromium_config" ]
  } else {
    # Standalone-only sources (not shipped in the Chromium build).
    sources += [
      "bandwidthsmoother.cc",
      "bandwidthsmoother.h",
      "bind.h",
      "bind.h.pump",
      "callback.h",
      "callback.h.pump",
      "fileutils_mock.h",
      "genericslot.h",
      "genericslot.h.pump",
      "httpserver.cc",
      "httpserver.h",
      "json.cc",
      "json.h",
      "logsinks.cc",
      "logsinks.h",
      "mathutils.h",
      "multipart.cc",
      "multipart.h",
      "natserver.cc",
      "natserver.h",
      "natsocketfactory.cc",
      "natsocketfactory.h",
      "nattypes.cc",
      "nattypes.h",
      "optionsfile.cc",
      "optionsfile.h",
      "profiler.cc",
      "profiler.h",
      "proxyserver.cc",
      "proxyserver.h",
      "refcount.h",
      "referencecountedsingletonfactory.h",
      "rollingaccumulator.h",
      "scoped_ref_ptr.h",
      "scopedptrcollection.h",
      "sec_buffer.h",
      "sharedexclusivelock.cc",
      "sharedexclusivelock.h",
      "sslconfig.h",
      "sslroots.h",
      "testclient.cc",
      "testclient.h",
      "transformadapter.cc",
      "transformadapter.h",
      "versionparsing.cc",
      "versionparsing.h",
      "virtualsocketserver.cc",
      "virtualsocketserver.h",
      "window.h",
      "windowpicker.h",
      "windowpickerfactory.h",
    ]
    deps += [ "..:webrtc_common" ]
    if (is_posix) {
      sources += [
        "latebindingsymboltable.cc",
        "latebindingsymboltable.cc.def",
        "latebindingsymboltable.h",
        "latebindingsymboltable.h.def",
        "posix.cc",
        "posix.h",
      ]
    }
    if (is_linux) {
      sources += [
        "dbus.cc",
        "dbus.h",
        "libdbusglibsymboltable.cc",
        "libdbusglibsymboltable.h",
        "linuxfdwalk.c",
        "linuxfdwalk.h",
      ]
    }
    if (is_mac) {
      sources += [
        "macasyncsocket.cc",
        "macasyncsocket.h",
        "maccocoasocketserver.h",
        "maccocoasocketserver.mm",
        "macsocketserver.cc",
        "macsocketserver.h",
        "macwindowpicker.cc",
        "macwindowpicker.h",
      ]
    }
    if (is_win) {
      sources += [
        "diskcache_win32.cc",
        "diskcache_win32.h",
        "win32regkey.cc",
        "win32regkey.h",
        "win32socketinit.cc",
        "win32socketinit.h",
        "win32socketserver.cc",
        "win32socketserver.h",
      ]
    }
    if (rtc_build_json) {
      deps += [ "//third_party/jsoncpp" ]
    } else {
      include_dirs += [ rtc_jsoncpp_root ]

      # When defined changes the include path for json.h to where it is
      # expected to be when building json outside of the standalone build.
      defines += [ "WEBRTC_EXTERNAL_JSON" ]
    }
  }  # !build_with_chromium

  # TODO(henrike): issue 3307, make rtc_base build with the Chromium default
  # compiler settings.
  configs -= [ "//build/config/compiler:chromium_code" ]
  configs += [ "//build/config/compiler:no_chromium_code" ]
  if (!is_win) {
    cflags += [ "-Wno-uninitialized" ]
    cflags_cc += [ "-Wno-non-virtual-dtor" ]
  }
  if (use_openssl) {
    public_configs += [ ":openssl_config" ]
    if (rtc_build_ssl) {
      deps += [ "//third_party/boringssl" ]
    } else {
      configs += [ "external_ssl_library" ]
    }
    sources += [
      "openssl.h",
      "openssladapter.cc",
      "openssladapter.h",
      "openssldigest.cc",
      "openssldigest.h",
      "opensslidentity.cc",
      "opensslidentity.h",
      "opensslstreamadapter.cc",
      "opensslstreamadapter.h",
    ]
  }
  if (is_android) {
    sources += [
      "ifaddrs-android.cc",
      "ifaddrs-android.h",
    ]
    libs += [
      "log",
      "GLESv2",
    ]
  }
  if (is_ios) {
    all_dependent_configs = [ ":ios_config" ]
    sources += [
      "macconversion.cc",
      "macconversion.h",
    ]
  }
  if (use_x11) {
    sources += [
      "x11windowpicker.cc",
      "x11windowpicker.h",
    ]
    libs += [
      "dl",
      "rt",
      "Xext",
      "X11",
      "Xcomposite",
      "Xrender",
    ]
  }
  if (is_linux) {
    libs += [
      "dl",
      "rt",
    ]
  }
  if (is_mac) {
    sources += [
      "maccocoathreadhelper.h",
      "maccocoathreadhelper.mm",
      "macconversion.cc",
      "macconversion.h",
      "macutils.cc",
      "macutils.h",
    ]
    all_dependent_configs = [ ":mac_config" ]
    if (current_cpu == "x86") {
      all_dependent_configs += [ ":mac_x86_config" ]
    }
  }
  if (is_win) {
    sources += [
      "win32.cc",
      "win32.h",
      "win32filesystem.cc",
      "win32filesystem.h",
      "win32securityerrors.cc",
      "win32window.cc",
      "win32window.h",
      "win32windowpicker.cc",
      "win32windowpicker.h",
      "winfirewall.cc",
      "winfirewall.h",
      "winping.cc",
      "winping.h",
    ]
    libs += [
      "crypt32.lib",
      "iphlpapi.lib",
      "secur32.lib",
    ]
    cflags += [
      # Suppress warnings about WIN32_LEAN_AND_MEAN.
      "/wd4005",
      "/wd4703",
    ]
    defines += [ "_CRT_NONSTDC_NO_DEPRECATE" ]
  }
  if (is_posix && is_debug) {
    # The Chromium build/common.gypi defines this for all posix
    # _except_ for ios & mac. We want it there as well, e.g.
    # because ASSERT and friends trigger off of it.
    defines += [ "_DEBUG" ]
  }
  if (is_ios || (is_mac && current_cpu != "x86")) {
    defines += [ "CARBON_DEPRECATED=YES" ]
  }
  if (is_linux || is_android) {
    sources += [
      "linux.cc",
      "linux.h",
    ]
  }
  if (is_nacl) {
    deps += [ "//native_client_sdk/src/libraries/nacl_io" ]
    defines += [ "timezone=_timezone" ]
  }
}

View File

@ -1,74 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_BASICTYPES_H_
#define WEBRTC_BASE_BASICTYPES_H_

#include <stddef.h>  // for NULL, size_t
#include <stdint.h>  // for uintptr_t and (u)int_t types.

#ifdef HAVE_CONFIG_H
#include "config.h"  // NOLINT
#endif

// Detect compiler is for x86 or x64.
#if defined(__x86_64__) || defined(_M_X64) || \
    defined(__i386__) || defined(_M_IX86)
#define CPU_X86 1
#endif

// Detect compiler is for arm.
#if defined(__arm__) || defined(_M_ARM)
#define CPU_ARM 1
#endif

// Sanity check: a single translation unit cannot target both architectures.
#if defined(CPU_X86) && defined(CPU_ARM)
#error CPU_X86 and CPU_ARM both defined.
#endif

// Determine endianness when the build did not specify it explicitly.
#if !defined(RTC_ARCH_CPU_BIG_ENDIAN) && !defined(RTC_ARCH_CPU_LITTLE_ENDIAN)
// x86, arm or GCC provided __BYTE_ORDER__ macros
#if CPU_X86 || CPU_ARM || \
    (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
#define RTC_ARCH_CPU_LITTLE_ENDIAN
#elif defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
#define RTC_ARCH_CPU_BIG_ENDIAN
#else
#error RTC_ARCH_CPU_BIG_ENDIAN or RTC_ARCH_CPU_LITTLE_ENDIAN should be defined.
#endif
#endif

#if defined(RTC_ARCH_CPU_BIG_ENDIAN) && defined(RTC_ARCH_CPU_LITTLE_ENDIAN)
#error RTC_ARCH_CPU_BIG_ENDIAN and RTC_ARCH_CPU_LITTLE_ENDIAN both defined.
#endif

// Winsock uses int where POSIX uses socklen_t; provide the POSIX name.
#if defined(WEBRTC_WIN)
typedef int socklen_t;
#endif

// The following only works for C++
#ifdef __cplusplus

// Rounds pointer |p| up to the next multiple of alignment |t| (a power of 2).
#ifndef ALIGNP
#define ALIGNP(p, t) \
    (reinterpret_cast<uint8_t*>(((reinterpret_cast<uintptr_t>(p) + \
    ((t) - 1)) & ~((t) - 1))))
#endif

// True iff pointer/integer |p| is aligned to an |a|-byte boundary.
#define RTC_IS_ALIGNED(p, a) (!((uintptr_t)(p) & ((a) - 1)))

// Use these to declare and define a static local variable that gets leaked so
// that its destructors are not called at exit.
#define RTC_DEFINE_STATIC_LOCAL(type, name, arguments) \
  static type& name = *new type arguments

#endif  // __cplusplus

#endif  // WEBRTC_BASE_BASICTYPES_H_

View File

@ -1,127 +0,0 @@
/*
* Copyright 2006 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Most of this was borrowed (with minor modifications) from V8's and Chromium's
// src/base/logging.cc.
// Use the C++ version to provide __GLIBCXX__.
#include <cstdarg>
#include <cstdio>
#include <cstdlib>
#if defined(__GLIBCXX__) && !defined(__UCLIBC__)
#include <cxxabi.h>
#include <execinfo.h>
#endif
#if defined(WEBRTC_ANDROID)
#define LOG_TAG "rtc"
#include <android/log.h> // NOLINT
#endif
#include "webrtc/base/checks.h"
#if defined(_MSC_VER)
// Warning C4722: destructor never returns, potential memory leak.
// FatalMessage's dtor very intentionally aborts.
#pragma warning(disable:4722)
#endif
namespace rtc {

// Writes a formatted error message to the platform error sink: the Android
// system log on Android, stderr everywhere else.
void VPrintError(const char* format, va_list args) {
#if defined(WEBRTC_ANDROID)
  __android_log_vprint(ANDROID_LOG_ERROR, LOG_TAG, format, args);
#else
  vfprintf(stderr, format, args);
#endif
}

// printf-style convenience wrapper around VPrintError().
void PrintError(const char* format, ...) {
  va_list args;
  va_start(args, format);
  VPrintError(format, args);
  va_end(args);
}

// TODO(ajm): This works on Mac (although the parsing fails) but I don't seem
// to get usable symbols on Linux. This is copied from V8. Chromium has a more
// advanced stack trace system; also more difficult to copy.
void DumpBacktrace() {
#if defined(__GLIBCXX__) && !defined(__UCLIBC__)
  void* trace[100];
  int size = backtrace(trace, sizeof(trace) / sizeof(*trace));
  char** symbols = backtrace_symbols(trace, size);
  PrintError("\n==== C stack trace ===============================\n\n");
  if (size == 0) {
    PrintError("(empty)\n");
  } else if (symbols == NULL) {
    PrintError("(no symbols)\n");
  } else {
    // Frame 0 is DumpBacktrace itself, so start at 1.
    for (int i = 1; i < size; ++i) {
      char mangled[201];
      // Extract the mangled symbol name between '(' and '+' or ')'.
      if (sscanf(symbols[i], "%*[^(]%*[(]%200[^)+]", mangled) == 1) {  // NOLINT
        PrintError("%2d: ", i);
        int status;
        size_t length;
        char* demangled = abi::__cxa_demangle(mangled, NULL, &length, &status);
        PrintError("%s\n", demangled != NULL ? demangled : mangled);
        free(demangled);
      } else {
        // If parsing failed, at least print the unparsed symbol.
        PrintError("%s\n", symbols[i]);
      }
    }
  }
  free(symbols);
#endif
}

FatalMessage::FatalMessage(const char* file, int line) {
  Init(file, line);
}

// Used by the RTC_CHECK_OP machinery; takes ownership of |result|.
FatalMessage::FatalMessage(const char* file, int line, std::string* result) {
  Init(file, line);
  stream_ << "Check failed: " << *result << std::endl << "# ";
  delete result;
}

// Flushes stdio, prints the accumulated message plus a backtrace, then aborts
// the process. Intentionally never returns (see the #pragma above silencing
// MSVC's C4722 for exactly this destructor).
NO_RETURN FatalMessage::~FatalMessage() {
  fflush(stdout);
  fflush(stderr);
  stream_ << std::endl << "#" << std::endl;
  PrintError(stream_.str().c_str());
  DumpBacktrace();
  fflush(stderr);
  abort();
}

// Writes the common "# Fatal error in file, line N" prefix.
void FatalMessage::Init(const char* file, int line) {
  stream_ << std::endl << std::endl << "#" << std::endl << "# Fatal error in "
          << file << ", line " << line << std::endl << "# ";
}

// MSVC doesn't like complex extern templates and DLLs.
#if !defined(COMPILER_MSVC)
// Explicit instantiations for commonly used comparisons.
template std::string* MakeCheckOpString<int, int>(
    const int&, const int&, const char* names);
template std::string* MakeCheckOpString<unsigned long, unsigned long>(
    const unsigned long&, const unsigned long&, const char* names);
template std::string* MakeCheckOpString<unsigned long, unsigned int>(
    const unsigned long&, const unsigned int&, const char* names);
template std::string* MakeCheckOpString<unsigned int, unsigned long>(
    const unsigned int&, const unsigned long&, const char* names);
template std::string* MakeCheckOpString<std::string, std::string>(
    const std::string&, const std::string&, const char* name);
#endif

}  // namespace rtc

View File

@ -1,229 +0,0 @@
/*
* Copyright 2006 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_CHECKS_H_
#define WEBRTC_BASE_CHECKS_H_

#include <sstream>
#include <string>

#include "webrtc/typedefs.h"

// The macros here print a message to stderr and abort under various
// conditions. All will accept additional stream messages. For example:
// RTC_DCHECK_EQ(foo, bar) << "I'm printed when foo != bar.";
//
// - RTC_CHECK(x) is an assertion that x is always true, and that if it isn't,
//   it's better to terminate the process than to continue. During development,
//   the reason that it's better to terminate might simply be that the error
//   handling code isn't in place yet; in production, the reason might be that
//   the author of the code truly believes that x will always be true, but that
//   she recognizes that if she is wrong, abrupt and unpleasant process
//   termination is still better than carrying on with the assumption violated.
//
//   RTC_CHECK always evaluates its argument, so it's OK for x to have side
//   effects.
//
// - RTC_DCHECK(x) is the same as RTC_CHECK(x)---an assertion that x is always
//   true---except that x will only be evaluated in debug builds; in production
//   builds, x is simply assumed to be true. This is useful if evaluating x is
//   expensive and the expected cost of failing to detect the violated
//   assumption is acceptable. You should not handle cases where a production
//   build fails to spot a violated condition, even those that would result in
//   crashes. If the code needs to cope with the error, make it cope, but don't
//   call RTC_DCHECK; if the condition really can't occur, but you'd sleep
//   better at night knowing that the process will suicide instead of carrying
//   on in case you were wrong, use RTC_CHECK instead of RTC_DCHECK.
//
//   RTC_DCHECK only evaluates its argument in debug builds, so if x has visible
//   side effects, you need to write e.g.
//     bool w = x; RTC_DCHECK(w);
//
// - RTC_CHECK_EQ, _NE, _GT, ..., and RTC_DCHECK_EQ, _NE, _GT, ... are
//   specialized variants of RTC_CHECK and RTC_DCHECK that print prettier
//   messages if the condition doesn't hold. Prefer them to raw RTC_CHECK and
//   RTC_DCHECK.
//
// - FATAL() aborts unconditionally.
//
// TODO(ajm): Ideally, checks.h would be combined with logging.h, but
// consolidation with system_wrappers/logging.h should happen first.

namespace rtc {

// Helper macro which avoids evaluating the arguments to a stream if
// the condition doesn't hold.
#define RTC_LAZY_STREAM(stream, condition) \
  !(condition) ? static_cast<void>(0) : rtc::FatalMessageVoidify() & (stream)

// The actual stream used isn't important. We reference condition in the code
// but don't evaluate it; this is to avoid "unused variable" warnings (we do so
// in a particularly convoluted way with an extra ?: because that appears to be
// the simplest construct that keeps Visual Studio from complaining about
// condition being unused).
#define RTC_EAT_STREAM_PARAMETERS(condition) \
  (true ? true : !(condition)) \
      ? static_cast<void>(0) \
      : rtc::FatalMessageVoidify() & rtc::FatalMessage("", 0).stream()

// RTC_CHECK dies with a fatal error if condition is not true. It is *not*
// controlled by NDEBUG, so the check will be executed regardless of
// compilation mode.
//
// We make sure RTC_CHECK et al. always evaluates their arguments, as
// doing RTC_CHECK(FunctionWithSideEffect()) is a common idiom.
#define RTC_CHECK(condition) \
  RTC_LAZY_STREAM(rtc::FatalMessage(__FILE__, __LINE__).stream(), \
                  !(condition)) \
      << "Check failed: " #condition << std::endl << "# "

// Helper macro for binary operators.
// Don't use this macro directly in your code, use RTC_CHECK_EQ et al below.
//
// TODO(akalin): Rewrite this so that constructs like if (...)
// RTC_CHECK_EQ(...) else { ... } work properly.
#define RTC_CHECK_OP(name, op, val1, val2) \
  if (std::string* _result = \
      rtc::Check##name##Impl((val1), (val2), #val1 " " #op " " #val2)) \
    rtc::FatalMessage(__FILE__, __LINE__, _result).stream()

// Build the error message string. This is separate from the "Impl"
// function template because it is not performance critical and so can
// be out of line, while the "Impl" code should be inline. Caller
// takes ownership of the returned string.
template<class t1, class t2>
std::string* MakeCheckOpString(const t1& v1, const t2& v2, const char* names) {
  std::ostringstream ss;
  ss << names << " (" << v1 << " vs. " << v2 << ")";
  std::string* msg = new std::string(ss.str());
  return msg;
}

// MSVC doesn't like complex extern templates and DLLs.
#if !defined(COMPILER_MSVC)
// Commonly used instantiations of MakeCheckOpString<>. Explicitly instantiated
// in logging.cc.
extern template std::string* MakeCheckOpString<int, int>(
    const int&, const int&, const char* names);
extern template
std::string* MakeCheckOpString<unsigned long, unsigned long>(
    const unsigned long&, const unsigned long&, const char* names);
extern template
std::string* MakeCheckOpString<unsigned long, unsigned int>(
    const unsigned long&, const unsigned int&, const char* names);
extern template
std::string* MakeCheckOpString<unsigned int, unsigned long>(
    const unsigned int&, const unsigned long&, const char* names);
extern template
std::string* MakeCheckOpString<std::string, std::string>(
    const std::string&, const std::string&, const char* name);
#endif

// Helper functions for RTC_CHECK_OP macro.
// The (int, int) specialization works around the issue that the compiler
// will not instantiate the template version of the function on values of
// unnamed enum type - see comment below.
#define DEFINE_RTC_CHECK_OP_IMPL(name, op) \
  template <class t1, class t2> \
  inline std::string* Check##name##Impl(const t1& v1, const t2& v2, \
                                        const char* names) { \
    if (v1 op v2) \
      return NULL; \
    else \
      return rtc::MakeCheckOpString(v1, v2, names); \
  } \
  inline std::string* Check##name##Impl(int v1, int v2, const char* names) { \
    if (v1 op v2) \
      return NULL; \
    else \
      return rtc::MakeCheckOpString(v1, v2, names); \
  }
DEFINE_RTC_CHECK_OP_IMPL(EQ, ==)
DEFINE_RTC_CHECK_OP_IMPL(NE, !=)
DEFINE_RTC_CHECK_OP_IMPL(LE, <=)
DEFINE_RTC_CHECK_OP_IMPL(LT, < )
DEFINE_RTC_CHECK_OP_IMPL(GE, >=)
DEFINE_RTC_CHECK_OP_IMPL(GT, > )
#undef DEFINE_RTC_CHECK_OP_IMPL

#define RTC_CHECK_EQ(val1, val2) RTC_CHECK_OP(EQ, ==, val1, val2)
#define RTC_CHECK_NE(val1, val2) RTC_CHECK_OP(NE, !=, val1, val2)
#define RTC_CHECK_LE(val1, val2) RTC_CHECK_OP(LE, <=, val1, val2)
#define RTC_CHECK_LT(val1, val2) RTC_CHECK_OP(LT, < , val1, val2)
#define RTC_CHECK_GE(val1, val2) RTC_CHECK_OP(GE, >=, val1, val2)
#define RTC_CHECK_GT(val1, val2) RTC_CHECK_OP(GT, > , val1, val2)

// The RTC_DCHECK macro is equivalent to RTC_CHECK except that it only generates
// code in debug builds. It does reference the condition parameter in all cases,
// though, so callers won't risk getting warnings about unused variables.
// DCHECKs are live when NDEBUG is unset or DCHECK_ALWAYS_ON is defined.
#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
#define RTC_DCHECK_IS_ON 1
#define RTC_DCHECK(condition) RTC_CHECK(condition)
#define RTC_DCHECK_EQ(v1, v2) RTC_CHECK_EQ(v1, v2)
#define RTC_DCHECK_NE(v1, v2) RTC_CHECK_NE(v1, v2)
#define RTC_DCHECK_LE(v1, v2) RTC_CHECK_LE(v1, v2)
#define RTC_DCHECK_LT(v1, v2) RTC_CHECK_LT(v1, v2)
#define RTC_DCHECK_GE(v1, v2) RTC_CHECK_GE(v1, v2)
#define RTC_DCHECK_GT(v1, v2) RTC_CHECK_GT(v1, v2)
#else
#define RTC_DCHECK_IS_ON 0
#define RTC_DCHECK(condition) RTC_EAT_STREAM_PARAMETERS(condition)
#define RTC_DCHECK_EQ(v1, v2) RTC_EAT_STREAM_PARAMETERS((v1) == (v2))
#define RTC_DCHECK_NE(v1, v2) RTC_EAT_STREAM_PARAMETERS((v1) != (v2))
#define RTC_DCHECK_LE(v1, v2) RTC_EAT_STREAM_PARAMETERS((v1) <= (v2))
#define RTC_DCHECK_LT(v1, v2) RTC_EAT_STREAM_PARAMETERS((v1) < (v2))
#define RTC_DCHECK_GE(v1, v2) RTC_EAT_STREAM_PARAMETERS((v1) >= (v2))
#define RTC_DCHECK_GT(v1, v2) RTC_EAT_STREAM_PARAMETERS((v1) > (v2))
#endif

// This is identical to LogMessageVoidify but in name.
class FatalMessageVoidify {
 public:
  FatalMessageVoidify() { }
  // This has to be an operator with a precedence lower than << but
  // higher than ?:
  void operator&(std::ostream&) { }
};

#define RTC_UNREACHABLE_CODE_HIT false
#define RTC_NOTREACHED() RTC_DCHECK(RTC_UNREACHABLE_CODE_HIT)

#define FATAL() rtc::FatalMessage(__FILE__, __LINE__).stream()
// TODO(ajm): Consider adding RTC_NOTIMPLEMENTED macro when
// base/logging.h and system_wrappers/logging.h are consolidated such that we
// can match the Chromium behavior.

// Like a stripped-down LogMessage from logging.h, except that it aborts.
class FatalMessage {
 public:
  FatalMessage(const char* file, int line);
  // Used for RTC_CHECK_EQ(), etc. Takes ownership of the given string.
  FatalMessage(const char* file, int line, std::string* result);
  NO_RETURN ~FatalMessage();
  std::ostream& stream() { return stream_; }

 private:
  void Init(const char* file, int line);

  std::ostringstream stream_;
};

// Performs the integer division a/b and returns the result. CHECKs that the
// remainder is zero.
template <typename T>
inline T CheckedDivExact(T a, T b) {
  RTC_CHECK_EQ(a % b, static_cast<T>(0));
  return a / b;
}

}  // namespace rtc

#endif  // WEBRTC_BASE_CHECKS_H_

View File

@ -1,34 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_CONSTRUCTORMAGIC_H_
#define WEBRTC_BASE_CONSTRUCTORMAGIC_H_
// All three macros below rely on C++11 '= delete' so that misuse fails at
// compile time rather than at link time.

// Put this in the declarations for a class to be unassignable.
#define RTC_DISALLOW_ASSIGN(TypeName) \
  void operator=(const TypeName&) = delete

// A macro to disallow the copy constructor and operator= functions. This should
// be used in the declarations for a class.
#define RTC_DISALLOW_COPY_AND_ASSIGN(TypeName) \
  TypeName(const TypeName&) = delete;          \
  RTC_DISALLOW_ASSIGN(TypeName)

// A macro to disallow all the implicit constructors, namely the default
// constructor, copy constructor and operator= functions.
//
// This should be used in the declarations for a class that wants to prevent
// anyone from instantiating it. This is especially useful for classes
// containing only static methods.
#define RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName) \
  TypeName() = delete;                               \
  RTC_DISALLOW_COPY_AND_ASSIGN(TypeName)
#endif // WEBRTC_BASE_CONSTRUCTORMAGIC_H_

View File

@ -1,169 +0,0 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/checks.h"
namespace rtc {
// Initializes the underlying OS lock. On POSIX the mutex is created with
// PTHREAD_MUTEX_RECURSIVE so the owning thread may Enter() repeatedly.
CriticalSection::CriticalSection() {
#if defined(WEBRTC_WIN)
  InitializeCriticalSection(&crit_);
#else
  pthread_mutexattr_t mutex_attribute;
  pthread_mutexattr_init(&mutex_attribute);
  pthread_mutexattr_settype(&mutex_attribute, PTHREAD_MUTEX_RECURSIVE);
  pthread_mutex_init(&mutex_, &mutex_attribute);
  // The attribute object is only needed during initialization.
  pthread_mutexattr_destroy(&mutex_attribute);
  CS_DEBUG_CODE(thread_ = 0);
  CS_DEBUG_CODE(recursion_count_ = 0);
#endif
}

// Releases the OS lock. Per POSIX, destroying a mutex that is still locked
// is undefined behavior, so the lock must be free here.
CriticalSection::~CriticalSection() {
#if defined(WEBRTC_WIN)
  DeleteCriticalSection(&crit_);
#else
  pthread_mutex_destroy(&mutex_);
#endif
}
// Acquires the lock, blocking until it is available. Re-entrant for the
// owning thread (recursive mutex on POSIX, CRITICAL_SECTION on Windows).
void CriticalSection::Enter() EXCLUSIVE_LOCK_FUNCTION() {
#if defined(WEBRTC_WIN)
  EnterCriticalSection(&crit_);
#else
  pthread_mutex_lock(&mutex_);
#if CS_DEBUG_CHECKS
  // Debug-only bookkeeping of owner thread and recursion depth, used by
  // CurrentThreadIsOwner()/IsLocked() DCHECKs.
  if (!recursion_count_) {
    RTC_DCHECK(!thread_);
    thread_ = pthread_self();
  } else {
    RTC_DCHECK(CurrentThreadIsOwner());
  }
  ++recursion_count_;
#endif
#endif
}

// Attempts to acquire the lock without blocking; returns true on success.
bool CriticalSection::TryEnter() EXCLUSIVE_TRYLOCK_FUNCTION(true) {
#if defined(WEBRTC_WIN)
  return TryEnterCriticalSection(&crit_) != FALSE;
#else
  if (pthread_mutex_trylock(&mutex_) != 0)
    return false;
#if CS_DEBUG_CHECKS
  // Same debug bookkeeping as Enter().
  if (!recursion_count_) {
    RTC_DCHECK(!thread_);
    thread_ = pthread_self();
  } else {
    RTC_DCHECK(CurrentThreadIsOwner());
  }
  ++recursion_count_;
#endif
  return true;
#endif
}
// Releases one level of the lock; the caller must be the current owner.
void CriticalSection::Leave() UNLOCK_FUNCTION() {
  RTC_DCHECK(CurrentThreadIsOwner());
#if defined(WEBRTC_WIN)
  LeaveCriticalSection(&crit_);
#else
#if CS_DEBUG_CHECKS
  // Clear the recorded owner only when the outermost recursion unwinds.
  --recursion_count_;
  RTC_DCHECK(recursion_count_ >= 0);
  if (!recursion_count_)
    thread_ = 0;
#endif
  pthread_mutex_unlock(&mutex_);
#endif
}
// Debug helper: true when the calling thread currently owns the lock.
// On POSIX release builds (no CS_DEBUG_CHECKS) there is no bookkeeping, so
// it unconditionally returns true.
bool CriticalSection::CurrentThreadIsOwner() const {
#if defined(WEBRTC_WIN)
  // OwningThread has type HANDLE but actually contains the Thread ID:
  // http://stackoverflow.com/questions/12675301/why-is-the-owningthread-member-of-critical-section-of-type-handle-when-it-is-de
  // Converting through size_t avoids the VS 2015 warning C4312: conversion from
  // 'type1' to 'type2' of greater size
  return crit_.OwningThread ==
         reinterpret_cast<HANDLE>(static_cast<size_t>(GetCurrentThreadId()));
#else
#if CS_DEBUG_CHECKS
  return pthread_equal(thread_, pthread_self());
#else
  // No owner bookkeeping in this configuration; trust the caller.
  return true;
#endif  // CS_DEBUG_CHECKS
#endif
}

// Debug helper: true when some thread holds the lock. Like the above, it is
// only meaningful where ownership is tracked; otherwise returns true.
bool CriticalSection::IsLocked() const {
#if defined(WEBRTC_WIN)
  return crit_.LockCount != -1;
#else
#if CS_DEBUG_CHECKS
  return thread_ != 0;
#else
  return true;
#endif
#endif
}
// Scoped lock: holds |cs| for the lifetime of the CritScope object.
CritScope::CritScope(CriticalSection* cs) : cs_(cs) { cs_->Enter(); }
CritScope::~CritScope() { cs_->Leave(); }

// Scoped try-lock: acquires |cs| only if it is immediately available.
// Callers must consult locked() before assuming ownership.
TryCritScope::TryCritScope(CriticalSection* cs)
    : cs_(cs), locked_(cs->TryEnter()) {
  CS_DEBUG_CODE(lock_was_called_ = false);
}

TryCritScope::~TryCritScope() {
  // Debug builds verify that locked() was actually consulted.
  CS_DEBUG_CODE(RTC_DCHECK(lock_was_called_));
  if (locked_)
    cs_->Leave();
}

// Reports whether the lock was taken at construction time.
bool TryCritScope::locked() const {
  CS_DEBUG_CODE(lock_was_called_ = true);
  return locked_;
}
// Spin-waits until the flag flips from 0 (free) to 1 (held), yielding the
// CPU between attempts (Sleep(0) / zero-length nanosleep).
void GlobalLockPod::Lock() {
#if !defined(WEBRTC_WIN)
  const struct timespec ts_null = {0};
#endif

  while (AtomicOps::CompareAndSwap(&lock_acquired, 0, 1)) {
#if defined(WEBRTC_WIN)
    ::Sleep(0);
#else
    nanosleep(&ts_null, nullptr);
#endif
  }
}

// Releases the lock; DCHECKs that it was actually held.
void GlobalLockPod::Unlock() {
  int old_value = AtomicOps::CompareAndSwap(&lock_acquired, 1, 0);
  RTC_DCHECK_EQ(1, old_value) << "Unlock called without calling Lock first";
}

// Non-POD convenience wrapper that zero-initializes the flag at runtime.
GlobalLock::GlobalLock() {
  lock_acquired = 0;
}

// Scoped holder for a GlobalLockPod.
GlobalLockScope::GlobalLockScope(GlobalLockPod* lock)
    : lock_(lock) {
  lock_->Lock();
}

GlobalLockScope::~GlobalLockScope() {
  lock_->Unlock();
}
} // namespace rtc

View File

@ -1,129 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_CRITICALSECTION_H_
#define WEBRTC_BASE_CRITICALSECTION_H_
#include "webrtc/base/atomicops.h"
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/thread_annotations.h"
#if defined(WEBRTC_WIN)
// Include winsock2.h before including <windows.h> to maintain consistency with
// win32.h. We can't include win32.h directly here since it pulls in
// headers such as basictypes.h which causes problems in Chromium where webrtc
// exists as two separate projects, webrtc and libjingle.
#include <winsock2.h>
#include <windows.h>
#include <sal.h> // must come after windows headers.
#endif // defined(WEBRTC_WIN)
#if defined(WEBRTC_POSIX)
#include <pthread.h>
#endif
// Enable lock-ownership bookkeeping in debug builds or when DCHECKs are
// forced on; CS_DEBUG_CODE(x) compiles its argument away otherwise.
#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
#define CS_DEBUG_CHECKS 1
#endif

#if CS_DEBUG_CHECKS
#define CS_DEBUG_CODE(x) x
#else  // !CS_DEBUG_CHECKS
#define CS_DEBUG_CODE(x)
#endif  // !CS_DEBUG_CHECKS
namespace rtc {
// Recursive mutex wrapper (CRITICAL_SECTION on Windows, recursive
// pthread_mutex_t on POSIX) carrying thread-safety analysis annotations.
class LOCKABLE CriticalSection {
 public:
  CriticalSection();
  ~CriticalSection();

  // Blocks until the lock is held; re-entrant for the owning thread.
  void Enter() EXCLUSIVE_LOCK_FUNCTION();
  // Non-blocking acquire; returns true if the lock was taken.
  bool TryEnter() EXCLUSIVE_TRYLOCK_FUNCTION(true);
  void Leave() UNLOCK_FUNCTION();

  // Use only for RTC_DCHECKing.
  bool CurrentThreadIsOwner() const;
  // Use only for RTC_DCHECKing.
  bool IsLocked() const;

 private:
#if defined(WEBRTC_WIN)
  CRITICAL_SECTION crit_;
#elif defined(WEBRTC_POSIX)
  pthread_mutex_t mutex_;
  // Debug-only owner thread and recursion depth (see CS_DEBUG_CODE).
  CS_DEBUG_CODE(pthread_t thread_);
  CS_DEBUG_CODE(int recursion_count_);
#endif
};
// CritScope, for serializing execution through a scope.
class SCOPED_LOCKABLE CritScope {
 public:
  explicit CritScope(CriticalSection* cs) EXCLUSIVE_LOCK_FUNCTION(cs);
  ~CritScope() UNLOCK_FUNCTION();

 private:
  CriticalSection* const cs_;
  RTC_DISALLOW_COPY_AND_ASSIGN(CritScope);
};

// Tries to lock a critical section on construction via
// CriticalSection::TryEnter, and unlocks on destruction if the
// lock was taken. Never blocks.
//
// IMPORTANT: Unlike CritScope, the lock may not be owned by this thread in
// subsequent code. Users *must* check locked() to determine if the
// lock was taken. If you're not calling locked(), you're doing it wrong!
class TryCritScope {
 public:
  explicit TryCritScope(CriticalSection* cs);
  ~TryCritScope();
#if defined(WEBRTC_WIN)
  // _Check_return_ / warn_unused_result make the compiler nag callers that
  // ignore the result, enforcing the contract above.
  _Check_return_ bool locked() const;
#else
  bool locked() const __attribute__((warn_unused_result));
#endif

 private:
  CriticalSection* const cs_;
  const bool locked_;
  // Debug-only flag asserting that locked() was queried before destruction.
  CS_DEBUG_CODE(mutable bool lock_was_called_);
  RTC_DISALLOW_COPY_AND_ASSIGN(TryCritScope);
};
// A POD lock used to protect global variables. Do NOT use for other purposes.
// No custom constructor or private data member should be added.
class LOCKABLE GlobalLockPod {
 public:
  void Lock() EXCLUSIVE_LOCK_FUNCTION();
  void Unlock() UNLOCK_FUNCTION();

  // Spin-lock flag: 0 = free, 1 = held. Left public and uninitialized so the
  // type stays POD and instances get static zero-initialization.
  volatile int lock_acquired;
};

// Non-POD variant whose constructor zero-initializes the lock flag.
class GlobalLock : public GlobalLockPod {
 public:
  GlobalLock();
};

// GlobalLockScope, for serializing execution through a scope.
class SCOPED_LOCKABLE GlobalLockScope {
 public:
  explicit GlobalLockScope(GlobalLockPod* lock) EXCLUSIVE_LOCK_FUNCTION(lock);
  ~GlobalLockScope() UNLOCK_FUNCTION();

 private:
  GlobalLockPod* const lock_;
  RTC_DISALLOW_COPY_AND_ASSIGN(GlobalLockScope);
};
} // namespace rtc
#endif // WEBRTC_BASE_CRITICALSECTION_H_

View File

@ -1,135 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/event.h"
#if defined(WEBRTC_WIN)
#include <windows.h>
#elif defined(WEBRTC_POSIX)
#include <pthread.h>
#include <sys/time.h>
#include <time.h>
#else
#error "Must define either WEBRTC_WIN or WEBRTC_POSIX."
#endif
#include "webrtc/base/checks.h"
namespace rtc {
#if defined(WEBRTC_WIN)
// Windows implementation: thin wrapper over a Win32 event object.
Event::Event(bool manual_reset, bool initially_signaled) {
  event_handle_ = ::CreateEvent(NULL,  // Security attributes.
                                manual_reset,
                                initially_signaled,
                                NULL);  // Name.
  RTC_CHECK(event_handle_);
}

Event::~Event() {
  CloseHandle(event_handle_);
}

void Event::Set() {
  SetEvent(event_handle_);
}

void Event::Reset() {
  ResetEvent(event_handle_);
}

// Waits up to |milliseconds| (kForever maps to INFINITE) for the event;
// returns true if it became signaled.
bool Event::Wait(int milliseconds) {
  DWORD ms = (milliseconds == kForever) ? INFINITE : milliseconds;
  return (WaitForSingleObject(event_handle_, ms) == WAIT_OBJECT_0);
}
#elif defined(WEBRTC_POSIX)
// POSIX implementation: a condition variable plus a mutex guarding the
// boolean signaled state (event_status_).
Event::Event(bool manual_reset, bool initially_signaled)
    : is_manual_reset_(manual_reset),
      event_status_(initially_signaled) {
  RTC_CHECK(pthread_mutex_init(&event_mutex_, NULL) == 0);
  RTC_CHECK(pthread_cond_init(&event_cond_, NULL) == 0);
}

Event::~Event() {
  pthread_mutex_destroy(&event_mutex_);
  pthread_cond_destroy(&event_cond_);
}

// Marks the event signaled and wakes every current waiter.
void Event::Set() {
  pthread_mutex_lock(&event_mutex_);
  event_status_ = true;
  pthread_cond_broadcast(&event_cond_);
  pthread_mutex_unlock(&event_mutex_);
}

void Event::Reset() {
  pthread_mutex_lock(&event_mutex_);
  event_status_ = false;
  pthread_mutex_unlock(&event_mutex_);
}

// Waits up to |milliseconds| (kForever blocks indefinitely) for the event to
// become signaled. Returns true if it did, false on timeout/error.
bool Event::Wait(int milliseconds) {
  pthread_mutex_lock(&event_mutex_);
  int error = 0;

  if (milliseconds != kForever) {
    // Converting from seconds and microseconds (1e-6) plus
    // milliseconds (1e-3) to seconds and nanoseconds (1e-9).
    struct timespec ts;
#if HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE
    // Use relative time version, which tends to be more efficient for
    // pthread implementations where provided (like on Android).
    ts.tv_sec = milliseconds / 1000;
    ts.tv_nsec = (milliseconds % 1000) * 1000000;
#else
    struct timeval tv;
    gettimeofday(&tv, NULL);

    ts.tv_sec = tv.tv_sec + (milliseconds / 1000);
    ts.tv_nsec = tv.tv_usec * 1000 + (milliseconds % 1000) * 1000000;

    // Handle overflow.
    if (ts.tv_nsec >= 1000000000) {
      ts.tv_sec++;
      ts.tv_nsec -= 1000000000;
    }
#endif

    // Loop to tolerate spurious wakeups; exits on signal or timeout error.
    while (!event_status_ && error == 0) {
#if HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE
      error = pthread_cond_timedwait_relative_np(
          &event_cond_, &event_mutex_, &ts);
#else
      error = pthread_cond_timedwait(&event_cond_, &event_mutex_, &ts);
#endif
    }
  } else {
    while (!event_status_ && error == 0)
      error = pthread_cond_wait(&event_cond_, &event_mutex_);
  }

  // NOTE(liulk): Exactly one thread will auto-reset this event. All
  // the other threads will think it's unsignaled. This seems to be
  // consistent with auto-reset events in WEBRTC_WIN
  if (error == 0 && !is_manual_reset_)
    event_status_ = false;

  pthread_mutex_unlock(&event_mutex_);

  return (error == 0);
}
#endif
} // namespace rtc

View File

@ -1,53 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_EVENT_H__
#define WEBRTC_BASE_EVENT_H__
#if defined(WEBRTC_WIN)
#include "webrtc/base/win32.h" // NOLINT: consider this a system header.
#elif defined(WEBRTC_POSIX)
#include <pthread.h>
#else
#error "Must define either WEBRTC_WIN or WEBRTC_POSIX."
#endif
#include "webrtc/base/basictypes.h"
namespace rtc {
// Manual- or auto-reset waitable event, mirroring the Win32 event API on
// POSIX via a condition variable and mutex.
class Event {
 public:
  static const int kForever = -1;

  Event(bool manual_reset, bool initially_signaled);
  ~Event();

  void Set();
  void Reset();

  // Wait for the event to become signaled, for the specified number of
  // |milliseconds|. To wait indefinitely, pass kForever.
  bool Wait(int milliseconds);

 private:
#if defined(WEBRTC_WIN)
  HANDLE event_handle_;
#elif defined(WEBRTC_POSIX)
  pthread_mutex_t event_mutex_;
  pthread_cond_t event_cond_;
  const bool is_manual_reset_;
  // Current signaled state, guarded by event_mutex_.
  bool event_status_;
#endif
};
} // namespace rtc
#endif // WEBRTC_BASE_EVENT_H__

View File

@ -1,110 +0,0 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_MAYBE_H_
#define WEBRTC_BASE_MAYBE_H_
#include <algorithm>
#include <utility>
#include "webrtc/base/checks.h"
namespace rtc {
// Simple std::experimental::optional-wannabe. It either contains a T or not.
// In order to keep the implementation simple and portable, this implementation
// actually contains a (default-constructed) T even when it supposedly doesn't
// contain a value; use e.g. rtc::scoped_ptr<T> instead if that's too
// expensive.
//
// A moved-from Maybe<T> may only be destroyed, and assigned to if T allows
// being assigned to after having been moved from. Specifically, you may not
// assume that it just doesn't contain a value anymore.
//
// TODO(kwiberg): Get rid of this class when the standard library has
// std::optional (and we're allowed to use it).
template <typename T>
class Maybe final {
 public:
  // Construct an empty Maybe.
  Maybe() : has_value_(false) {}

  // Construct a Maybe that contains a value.
  explicit Maybe(const T& val) : value_(val), has_value_(true) {}
  explicit Maybe(T&& val) : value_(static_cast<T&&>(val)), has_value_(true) {}

  // Copy and move constructors.
  // TODO(kwiberg): =default the move constructor when MSVC supports it.
  // (static_cast<T&&> is a hand-rolled std::move, avoiding <utility>.)
  Maybe(const Maybe&) = default;
  Maybe(Maybe&& m)
      : value_(static_cast<T&&>(m.value_)), has_value_(m.has_value_) {}

  // Assignment.
  // TODO(kwiberg): =default the move assignment op when MSVC supports it.
  Maybe& operator=(const Maybe&) = default;
  Maybe& operator=(Maybe&& m) {
    value_ = static_cast<T&&>(m.value_);
    has_value_ = m.has_value_;
    return *this;
  }

  friend void swap(Maybe& m1, Maybe& m2) {
    using std::swap;
    swap(m1.value_, m2.value_);
    swap(m1.has_value_, m2.has_value_);
  }

  // Conversion to bool to test if we have a value.
  explicit operator bool() const { return has_value_; }

  // Dereferencing. Only allowed if we have a value.
  const T* operator->() const {
    RTC_DCHECK(has_value_);
    return &value_;
  }
  T* operator->() {
    RTC_DCHECK(has_value_);
    return &value_;
  }
  const T& operator*() const {
    RTC_DCHECK(has_value_);
    return value_;
  }
  T& operator*() {
    RTC_DCHECK(has_value_);
    return value_;
  }

  // Dereference with a default value in case we don't have a value.
  const T& value_or(const T& default_val) const {
    return has_value_ ? value_ : default_val;
  }

  // Equality tests. Two Maybes are equal if they contain equivalent values, or
  // if they're both empty.
  friend bool operator==(const Maybe& m1, const Maybe& m2) {
    return m1.has_value_ && m2.has_value_ ? m1.value_ == m2.value_
                                          : m1.has_value_ == m2.has_value_;
  }
  friend bool operator!=(const Maybe& m1, const Maybe& m2) {
    return m1.has_value_ && m2.has_value_ ? m1.value_ != m2.value_
                                          : m1.has_value_ != m2.has_value_;
  }

 private:
  // Invariant: Unless *this has been moved from, value_ is default-initialized
  // (or copied or moved from a default-initialized T) if !has_value_.
  T value_;
  bool has_value_;
};
} // namespace rtc
#endif // WEBRTC_BASE_MAYBE_H_

View File

@ -1,34 +0,0 @@
# Sources for the webrtc "base" support library.
base_sources = [
  'criticalsection.cc',
  'checks.cc',
  'event.cc',
  'platform_thread.cc',
  'platform_file.cc',
  'stringutils.cc',
  'thread_checker_impl.cc',
]

# Public headers installed for downstream consumers.
base_headers = [
  'arraysize.h',
  'checks.h',
  'constructormagic.h',
  'basictypes.h',
  'maybe.h',
  'platform_file.h',
]

install_headers(base_headers,
  subdir: 'webrtc_audio_processing/webrtc/base'
)

# NOTE(review): meson adds the platform library prefix itself, so a target
# named 'libbase' typically produces 'liblibbase.a' — confirm this is intended.
libbase = static_library('libbase',
  base_sources,
  dependencies: common_deps,
  include_directories: webrtc_inc,
  cpp_args : common_cxxflags
)

base_dep = declare_dependency(
  link_with: libbase
)

View File

@ -1,49 +0,0 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/platform_file.h"
#if defined(WEBRTC_WIN)
#include <io.h>
#else
#include <unistd.h>
#endif
namespace rtc {
#if defined(WEBRTC_WIN)
// Sentinel meaning "no file" on Windows.
const PlatformFile kInvalidPlatformFileValue = INVALID_HANDLE_VALUE;

// Wraps a Win32 HANDLE in a writable stdio FILE*. Returns NULL on failure;
// on success the returned FILE* owns the handle through its CRT descriptor.
FILE* FdopenPlatformFileForWriting(PlatformFile file) {
  if (file == kInvalidPlatformFileValue)
    return NULL;
  int fd = _open_osfhandle(reinterpret_cast<intptr_t>(file), 0);
  if (fd < 0)
    return NULL;

  return _fdopen(fd, "w");
}

// Returns true if the handle was closed successfully.
bool ClosePlatformFile(PlatformFile file) {
  return CloseHandle(file) != 0;
}
#else
// Sentinel meaning "no file" on POSIX (an invalid descriptor).
const PlatformFile kInvalidPlatformFileValue = -1;

// Wraps a file descriptor in a writable stdio FILE*. Returns NULL on
// failure. Rejects the invalid sentinel up front for parity with the
// Windows implementation instead of relying on fdopen() to fail.
FILE* FdopenPlatformFileForWriting(PlatformFile file) {
  if (file == kInvalidPlatformFileValue)
    return NULL;
  return fdopen(file, "w");
}

// Returns true if the descriptor was closed successfully.
// Bug fix: close() returns 0 on success and -1 on error, so the previous
// `return close(file);` reported success as false and failure as true.
bool ClosePlatformFile(PlatformFile file) {
  return close(file) == 0;
}
#endif
} // namespace rtc

View File

@ -1,44 +0,0 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_PLATFORM_FILE_H_
#define WEBRTC_BASE_PLATFORM_FILE_H_
#include <stdio.h>
#if defined(WEBRTC_WIN)
#include <windows.h>
#endif
namespace rtc {
#if defined(WEBRTC_WIN)
typedef HANDLE PlatformFile;
#elif defined(WEBRTC_POSIX)
typedef int PlatformFile;
#else
#error Unsupported platform
#endif

// Value representing "no file" (INVALID_HANDLE_VALUE on Windows, -1 on
// POSIX).
extern const PlatformFile kInvalidPlatformFileValue;

// Associates a standard FILE stream with an existing PlatformFile.
// Note that after this function has returned a valid FILE stream,
// the PlatformFile should no longer be used.
FILE* FdopenPlatformFileForWriting(PlatformFile file);

// Closes a PlatformFile; returns true on success.
// Don't use ClosePlatformFile to close a file opened with FdopenPlatformFile.
// Use fclose instead.
bool ClosePlatformFile(PlatformFile file);
} // namespace rtc
#endif // WEBRTC_BASE_PLATFORM_FILE_H_

View File

@ -1,86 +0,0 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/platform_thread.h"
#include <string.h>
#include "webrtc/base/checks.h"
#if defined(WEBRTC_LINUX)
#include <sys/prctl.h>
#include <sys/syscall.h>
#elif defined(WEBRTC_GNU)
#include <sys/syscall.h>
#endif
namespace rtc {
// Returns an integral id for the calling thread, using the cheapest
// platform-specific source available.
PlatformThreadId CurrentThreadId() {
  PlatformThreadId ret;
#if defined(WEBRTC_WIN)
  ret = GetCurrentThreadId();
#elif defined(WEBRTC_POSIX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
  ret = pthread_mach_thread_np(pthread_self());
#elif defined(WEBRTC_LINUX)
  ret = syscall(__NR_gettid);
#elif defined(WEBRTC_ANDROID)
  ret = gettid();
#elif defined(WEBRTC_GNU)
  ret = pthread_self();
#else
  // Default implementation for nacl and solaris.
  ret = reinterpret_cast<pid_t>(pthread_self());
#endif
#endif  // defined(WEBRTC_POSIX)
  RTC_DCHECK(ret);
  return ret;
}

// Returns an opaque reference to the calling thread, comparable only via
// IsThreadRefEqual().
PlatformThreadRef CurrentThreadRef() {
#if defined(WEBRTC_WIN)
  return GetCurrentThreadId();
#elif defined(WEBRTC_POSIX)
  return pthread_self();
#endif
}

// Compares two thread references for identity.
bool IsThreadRefEqual(const PlatformThreadRef& a, const PlatformThreadRef& b) {
#if defined(WEBRTC_WIN)
  return a == b;
#elif defined(WEBRTC_POSIX)
  // pthread_t is opaque; pthread_equal() is the only portable comparison.
  return pthread_equal(a, b);
#endif
}
// Best-effort: sets the OS-visible name of the calling thread; a no-op on
// platforms without a known mechanism.
void SetCurrentThreadName(const char* name) {
  // Platforms cap thread-name length (e.g. Linux PR_SET_NAME uses 16 bytes
  // including the terminator); keep names short.
  RTC_DCHECK(strlen(name) < 64);
#if defined(WEBRTC_WIN)
  // Raise the magic MSVC debugger exception (0x406D1388) that names a
  // thread; it is harmlessly swallowed when no debugger is attached.
  struct {
    DWORD dwType;
    LPCSTR szName;
    DWORD dwThreadID;
    DWORD dwFlags;
  } threadname_info = {0x1000, name, static_cast<DWORD>(-1), 0};

  __try {
    ::RaiseException(0x406D1388, 0, sizeof(threadname_info) / sizeof(DWORD),
                     reinterpret_cast<ULONG_PTR*>(&threadname_info));
  } __except (EXCEPTION_EXECUTE_HANDLER) {
  }
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID)
  prctl(PR_SET_NAME, reinterpret_cast<unsigned long>(name));
#elif defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
  pthread_setname_np(name);
#endif
}
} // namespace rtc

View File

@ -1,636 +0,0 @@
/*
* Copyright 2012 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Borrowed from Chromium's src/base/memory/scoped_ptr.h.
// Scopers help you manage ownership of a pointer, helping you easily manage a
// pointer within a scope, and automatically destroying the pointer at the end
// of a scope. There are two main classes you will use, which correspond to the
// operators new/delete and new[]/delete[].
//
// Example usage (scoped_ptr<T>):
// {
// scoped_ptr<Foo> foo(new Foo("wee"));
// } // foo goes out of scope, releasing the pointer with it.
//
// {
// scoped_ptr<Foo> foo; // No pointer managed.
// foo.reset(new Foo("wee")); // Now a pointer is managed.
// foo.reset(new Foo("wee2")); // Foo("wee") was destroyed.
// foo.reset(new Foo("wee3")); // Foo("wee2") was destroyed.
// foo->Method(); // Foo::Method() called.
// foo.get()->Method(); // Foo::Method() called.
// SomeFunc(foo.release()); // SomeFunc takes ownership, foo no longer
// // manages a pointer.
// foo.reset(new Foo("wee4")); // foo manages a pointer again.
// foo.reset(); // Foo("wee4") destroyed, foo no longer
// // manages a pointer.
// } // foo wasn't managing a pointer, so nothing was destroyed.
//
// Example usage (scoped_ptr<T[]>):
// {
// scoped_ptr<Foo[]> foo(new Foo[100]);
// foo.get()->Method(); // Foo::Method on the 0th element.
// foo[10].Method(); // Foo::Method on the 10th element.
// }
//
// These scopers also implement part of the functionality of C++11 unique_ptr
// in that they are "movable but not copyable." You can use the scopers in
// the parameter and return types of functions to signify ownership transfer
// in to and out of a function. When calling a function that has a scoper
// as the argument type, it must be called with the result of an analogous
// scoper's Pass() function or another function that generates a temporary;
// passing by copy will NOT work. Here is an example using scoped_ptr:
//
// void TakesOwnership(scoped_ptr<Foo> arg) {
// // Do something with arg
// }
// scoped_ptr<Foo> CreateFoo() {
// // No need for calling Pass() because we are constructing a temporary
// // for the return value.
// return scoped_ptr<Foo>(new Foo("new"));
// }
// scoped_ptr<Foo> PassThru(scoped_ptr<Foo> arg) {
// return arg.Pass();
// }
//
// {
// scoped_ptr<Foo> ptr(new Foo("yay")); // ptr manages Foo("yay").
// TakesOwnership(ptr.Pass()); // ptr no longer owns Foo("yay").
// scoped_ptr<Foo> ptr2 = CreateFoo(); // ptr2 owns the return Foo.
// scoped_ptr<Foo> ptr3 = // ptr3 now owns what was in ptr2.
// PassThru(ptr2.Pass()); // ptr2 is correspondingly nullptr.
// }
//
// Notice that if you do not call Pass() when returning from PassThru(), or
// when invoking TakesOwnership(), the code will not compile because scopers
// are not copyable; they only implement move semantics which require calling
// the Pass() function to signify a destructive transfer of state. CreateFoo()
// is different though because we are constructing a temporary on the return
// line and thus can avoid needing to call Pass().
//
// Pass() properly handles upcast in initialization, i.e. you can use a
// scoped_ptr<Child> to initialize a scoped_ptr<Parent>:
//
// scoped_ptr<Foo> foo(new Foo());
// scoped_ptr<FooParent> parent(foo.Pass());
//
// PassAs<>() should be used to upcast return value in return statement:
//
// scoped_ptr<Foo> CreateFoo() {
// scoped_ptr<FooChild> result(new FooChild());
// return result.PassAs<Foo>();
// }
//
// Note that PassAs<>() is implemented only for scoped_ptr<T>, but not for
// scoped_ptr<T[]>. This is because casting array pointers may not be safe.
#ifndef WEBRTC_BASE_SCOPED_PTR_H__
#define WEBRTC_BASE_SCOPED_PTR_H__
// This is an implementation designed to match the anticipated future TR2
// implementation of the scoped_ptr class.
#include <assert.h>
#include <stddef.h>
#include <stdlib.h>
#include <algorithm> // For std::swap().
#include "webrtc/base/constructormagic.h"
#include "webrtc/base/template_util.h"
#include "webrtc/typedefs.h"
namespace rtc {
// Function object which deletes its parameter, which must be a pointer.
// If C is an array type, invokes 'delete[]' on the parameter; otherwise,
// invokes 'delete'. The default deleter for scoped_ptr<T>.
template <class T>
struct DefaultDeleter {
  DefaultDeleter() {}
  // Converting constructor so a DefaultDeleter<Derived> can become a
  // DefaultDeleter<Base> when a scoped_ptr is upcast.
  template <typename U> DefaultDeleter(const DefaultDeleter<U>& other) {
    // IMPLEMENTATION NOTE: C++11 20.7.1.1.2p2 only provides this constructor
    // if U* is implicitly convertible to T* and U is not an array type.
    //
    // Correct implementation should use SFINAE to disable this
    // constructor. However, since there are no other 1-argument constructors,
    // using a static_assert based on is_convertible<> and requiring
    // complete types is simpler and will cause compile failures for equivalent
    // misuses.
    //
    // Note, the is_convertible<U*, T*> check also ensures that U is not an
    // array. T is guaranteed to be a non-array, so any U* where U is an array
    // cannot convert to T*.
    enum { T_must_be_complete = sizeof(T) };
    enum { U_must_be_complete = sizeof(U) };
    static_assert(rtc::is_convertible<U*, T*>::value,
                  "U* must implicitly convert to T*");
  }
  inline void operator()(T* ptr) const {
    // sizeof(T) fails to compile for incomplete T, preventing 'delete'
    // through an incomplete type (which would be undefined behavior).
    enum { type_must_be_complete = sizeof(T) };
    delete ptr;
  }
};
// Specialization of DefaultDeleter for array types: uses delete[].
template <class T>
struct DefaultDeleter<T[]> {
  inline void operator()(T* ptr) const {
    // Reject incomplete element types; deleting them is undefined behavior.
    enum { type_must_be_complete = sizeof(T) };
    delete[] ptr;
  }

 private:
  // Disable this operator for any U != T because it is undefined to execute
  // an array delete when the static type of the array mismatches the dynamic
  // type.
  //
  // References:
  //   C++98 [expr.delete]p3
  //   http://cplusplus.github.com/LWG/lwg-defects.html#938
  template <typename U> void operator()(U* array) const;
};

// Fixed-size array types are rejected outright at compile time.
template <class T, int n>
struct DefaultDeleter<T[n]> {
  // Never allow someone to declare something like scoped_ptr<int[10]>.
  static_assert(sizeof(T) == -1, "do not use array with size as type");
};
// Deleter that releases memory with 'free'. Lets scoped_ptr manage pointers
// obtained from C allocation APIs, e.g.:
//
//   scoped_ptr<int, rtc::FreeDeleter> foo_ptr(
//       static_cast<int*>(malloc(sizeof(int))));
struct FreeDeleter {
  // Defined in-class, so implicitly inline.
  void operator()(void* ptr) const { free(ptr); }
};
namespace internal {
// Detects (via SFINAE on a nested AllowSelfReset type) whether deleter T
// opts out of the "abort on scoped_ptr self-reset" safety check.
template <typename T>
struct ShouldAbortOnSelfReset {
  template <typename U>
  static rtc::internal::NoType Test(const typename U::AllowSelfReset*);

  template <typename U>
  static rtc::internal::YesType Test(...);

  // True (abort on self-reset) unless T declares a nested AllowSelfReset
  // type, in which case the pointer overload is selected and value is false.
  static const bool value =
      sizeof(Test<T>(0)) == sizeof(rtc::internal::YesType);
};
// Minimal implementation of the core logic of scoped_ptr, suitable for
// reuse in both scoped_ptr and its specializations.
template <class T, class D>
class scoped_ptr_impl {
 public:
  explicit scoped_ptr_impl(T* p) : data_(p) {}

  // Initializer for deleters that have data parameters.
  scoped_ptr_impl(T* p, const D& d) : data_(p, d) {}

  // Templated constructor that destructively takes the value from another
  // scoped_ptr_impl.
  template <typename U, typename V>
  scoped_ptr_impl(scoped_ptr_impl<U, V>* other)
      : data_(other->release(), other->get_deleter()) {
    // We do not support move-only deleters. We could modify our move
    // emulation to have rtc::subtle::move() and rtc::subtle::forward()
    // functions that are imperfect emulations of their C++11 equivalents,
    // but until there's a requirement, just assume deleters are copyable.
  }

  // Transfers pointer and deleter state from |other|, releasing any
  // currently owned object first.
  template <typename U, typename V>
  void TakeState(scoped_ptr_impl<U, V>* other) {
    // See comment in templated constructor above regarding lack of support
    // for move-only deleters.
    reset(other->release());
    get_deleter() = other->get_deleter();
  }

  ~scoped_ptr_impl() {
    if (data_.ptr != nullptr) {
      // Not using get_deleter() saves one function call in non-optimized
      // builds.
      static_cast<D&>(data_)(data_.ptr);
    }
  }

  void reset(T* p) {
    // This is a self-reset, which is no longer allowed for default deleters:
    // https://crbug.com/162971
    assert(!ShouldAbortOnSelfReset<D>::value || p == nullptr || p != data_.ptr);

    // Note that running data_.ptr = p can lead to undefined behavior if
    // get_deleter()(get()) deletes this. In order to prevent this, reset()
    // should update the stored pointer before deleting its old value.
    //
    // However, changing reset() to use that behavior may cause current code to
    // break in unexpected ways. If the destruction of the owned object
    // dereferences the scoped_ptr when it is destroyed by a call to reset(),
    // then it will incorrectly dispatch calls to |p| rather than the original
    // value of |data_.ptr|.
    //
    // During the transition period, set the stored pointer to nullptr while
    // deleting the object. Eventually, this safety check will be removed to
    // prevent the scenario initially described from occurring and
    // http://crbug.com/176091 can be closed.
    T* old = data_.ptr;
    data_.ptr = nullptr;
    if (old != nullptr)
      static_cast<D&>(data_)(old);
    data_.ptr = p;
  }

  T* get() const { return data_.ptr; }

  D& get_deleter() { return data_; }
  const D& get_deleter() const { return data_; }

  void swap(scoped_ptr_impl& p2) {
    // Standard swap idiom: 'using std::swap' ensures that std::swap is
    // present in the overload set, but we call swap unqualified so that
    // any more-specific overloads can be used, if available.
    using std::swap;
    swap(static_cast<D&>(data_), static_cast<D&>(p2.data_));
    swap(data_.ptr, p2.data_.ptr);
  }

  // Relinquishes ownership and returns the raw pointer.
  T* release() {
    T* old_ptr = data_.ptr;
    data_.ptr = nullptr;
    return old_ptr;
  }

  // Resets to null and returns the address of the internal pointer slot, so
  // C-style out-parameter APIs can write directly into the scoper.
  T** accept() {
    reset(nullptr);
    return &(data_.ptr);
  }

  // Returns the address of the internal pointer slot without resetting it.
  T** use() {
    return &(data_.ptr);
  }

 private:
  // Needed to allow type-converting constructor.
  template <typename U, typename V> friend class scoped_ptr_impl;

  // Use the empty base class optimization to allow us to have a D
  // member, while avoiding any space overhead for it when D is an
  // empty class. See e.g. http://www.cantrip.org/emptyopt.html for a good
  // discussion of this technique.
  struct Data : public D {
    explicit Data(T* ptr_in) : ptr(ptr_in) {}
    Data(T* ptr_in, const D& other) : D(other), ptr(ptr_in) {}
    T* ptr;
  };

  Data data_;

  RTC_DISALLOW_COPY_AND_ASSIGN(scoped_ptr_impl);
};
} // namespace internal
// A scoped_ptr<T> is like a T*, except that the destructor of scoped_ptr<T>
// automatically deletes the pointer it holds (if any).
// That is, scoped_ptr<T> owns the T object that it points to.
// Like a T*, a scoped_ptr<T> may hold either nullptr or a pointer to a T
// object. Also like T*, scoped_ptr<T> is thread-compatible, and once you
// dereference it, you get the thread safety guarantees of T.
//
// The size of scoped_ptr is small. On most compilers, when using the
// DefaultDeleter, sizeof(scoped_ptr<T>) == sizeof(T*). Custom deleters will
// increase the size proportional to whatever state they need to have. See
// comments inside scoped_ptr_impl<> for details.
//
// Current implementation targets having a strict subset of C++11's
// unique_ptr<> features. Known deficiencies include not supporting move-only
// deleters, function pointers as deleters, and deleters with reference
// types.
template <class T, class D = rtc::DefaultDeleter<T> >
class scoped_ptr {
// TODO(ajm): If we ever import RefCountedBase, this check needs to be
// enabled.
//static_assert(rtc::internal::IsNotRefCounted<T>::value,
// "T is refcounted type and needs scoped refptr");
public:
// The element and deleter types.
typedef T element_type;
typedef D deleter_type;
// Constructor. Defaults to initializing with nullptr.
scoped_ptr() : impl_(nullptr) {}
// Constructor. Takes ownership of p.
explicit scoped_ptr(element_type* p) : impl_(p) {}
// Constructor. Allows initialization of a stateful deleter.
scoped_ptr(element_type* p, const D& d) : impl_(p, d) {}
// Constructor. Allows construction from a nullptr.
scoped_ptr(decltype(nullptr)) : impl_(nullptr) {}
// Constructor. Allows construction from a scoped_ptr rvalue for a
// convertible type and deleter.
//
// IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this constructor distinct
// from the normal move constructor. By C++11 20.7.1.2.1.21, this constructor
// has different post-conditions if D is a reference type. Since this
// implementation does not support deleters with reference type,
// we do not need a separate move constructor allowing us to avoid one
// use of SFINAE. You only need to care about this if you modify the
// implementation of scoped_ptr.
template <typename U, typename V>
scoped_ptr(scoped_ptr<U, V>&& other)
: impl_(&other.impl_) {
static_assert(!rtc::is_array<U>::value, "U cannot be an array");
}
// operator=. Allows assignment from a scoped_ptr rvalue for a convertible
// type and deleter.
//
// IMPLEMENTATION NOTE: C++11 unique_ptr<> keeps this operator= distinct from
// the normal move assignment operator. By C++11 20.7.1.2.3.4, this templated
// form has different requirements on for move-only Deleters. Since this
// implementation does not support move-only Deleters, we do not need a
// separate move assignment operator allowing us to avoid one use of SFINAE.
// You only need to care about this if you modify the implementation of
// scoped_ptr.
template <typename U, typename V>
scoped_ptr& operator=(scoped_ptr<U, V>&& rhs) {
static_assert(!rtc::is_array<U>::value, "U cannot be an array");
impl_.TakeState(&rhs.impl_);
return *this;
}
// operator=. Allows assignment from a nullptr. Deletes the currently owned
// object, if any.
scoped_ptr& operator=(decltype(nullptr)) {
reset();
return *this;
}
// Deleted copy constructor and copy assignment, to make the type move-only.
scoped_ptr(const scoped_ptr& other) = delete;
scoped_ptr& operator=(const scoped_ptr& other) = delete;
// Get an rvalue reference. (sp.Pass() does the same thing as std::move(sp).)
scoped_ptr&& Pass() { return static_cast<scoped_ptr&&>(*this); }
// Reset. Deletes the currently owned object, if any.
// Then takes ownership of a new object, if given.
void reset(element_type* p = nullptr) { impl_.reset(p); }
// Accessors to get the owned object.
// operator* and operator-> will assert() if there is no current object.
element_type& operator*() const {
assert(impl_.get() != nullptr);
return *impl_.get();
}
element_type* operator->() const {
assert(impl_.get() != nullptr);
return impl_.get();
}
element_type* get() const { return impl_.get(); }
// Access to the deleter.
deleter_type& get_deleter() { return impl_.get_deleter(); }
const deleter_type& get_deleter() const { return impl_.get_deleter(); }
// Allow scoped_ptr<element_type> to be used in boolean expressions, but not
// implicitly convertible to a real bool (which is dangerous).
//
// Note that this trick is only safe when the == and != operators
// are declared explicitly, as otherwise "scoped_ptr1 ==
// scoped_ptr2" will compile but do the wrong thing (i.e., convert
// to Testable and then do the comparison).
private:
// Pre-C++11 "safe bool" idiom: a pointer-to-member converts to bool in
// conditions but cannot participate in arithmetic or accidental casts.
typedef rtc::internal::scoped_ptr_impl<element_type, deleter_type>
scoped_ptr::*Testable;
public:
operator Testable() const {
return impl_.get() ? &scoped_ptr::impl_ : nullptr;
}
// Comparison operators.
// These return whether two scoped_ptr refer to the same object, not just to
// two different but equal objects.
bool operator==(const element_type* p) const { return impl_.get() == p; }
bool operator!=(const element_type* p) const { return impl_.get() != p; }
// Swap two scoped pointers.
void swap(scoped_ptr& p2) {
impl_.swap(p2.impl_);
}
// Release a pointer.
// The return value is the current pointer held by this object. If this object
// holds a nullptr, the return value is nullptr. After this operation, this
// object will hold a nullptr, and will not own the object any more.
element_type* release() WARN_UNUSED_RESULT {
return impl_.release();
}
// Delete the currently held pointer and return a pointer
// to allow overwriting of the current pointer address.
element_type** accept() WARN_UNUSED_RESULT {
return impl_.accept();
}
// Return a pointer to the current pointer address.
element_type** use() WARN_UNUSED_RESULT {
return impl_.use();
}
private:
// Needed to reach into |impl_| in the constructor.
template <typename U, typename V> friend class scoped_ptr;
rtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
// Forbidden for API compatibility with std::unique_ptr.
explicit scoped_ptr(int disallow_construction_from_null);
// Forbid comparison of scoped_ptr types. If U != T, it totally
// doesn't make sense, and if U == T, it still doesn't make sense
// because you should never have the same object owned by two different
// scoped_ptrs.
// Deliberately declared but never defined: any use is a link-time error.
template <class U> bool operator==(scoped_ptr<U> const& p2) const;
template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
};
// Partial specialization for arrays: uses delete[] semantics via D and
// provides operator[] instead of operator* / operator->.
template <class T, class D>
class scoped_ptr<T[], D> {
public:
// The element and deleter types.
typedef T element_type;
typedef D deleter_type;
// Constructor. Defaults to initializing with nullptr.
scoped_ptr() : impl_(nullptr) {}
// Constructor. Stores the given array. Note that the argument's type
// must exactly match T*. In particular:
// - it cannot be a pointer to a type derived from T, because it is
// inherently unsafe in the general case to access an array through a
// pointer whose dynamic type does not match its static type (eg., if
// T and the derived types had different sizes access would be
// incorrectly calculated). Deletion is also always undefined
// (C++98 [expr.delete]p3). If you're doing this, fix your code.
// - it cannot be const-qualified differently from T per unique_ptr spec
// (http://cplusplus.github.com/LWG/lwg-active.html#2118). Users wanting
// to work around this may use implicit_cast<const T*>().
// However, because of the first bullet in this comment, users MUST
// NOT use implicit_cast<Base*>() to upcast the static type of the array.
explicit scoped_ptr(element_type* array) : impl_(array) {}
// Constructor. Allows construction from a nullptr.
scoped_ptr(decltype(nullptr)) : impl_(nullptr) {}
// Constructor. Allows construction from a scoped_ptr rvalue.
scoped_ptr(scoped_ptr&& other) : impl_(&other.impl_) {}
// operator=. Allows assignment from a scoped_ptr rvalue.
scoped_ptr& operator=(scoped_ptr&& rhs) {
impl_.TakeState(&rhs.impl_);
return *this;
}
// operator=. Allows assignment from a nullptr. Deletes the currently owned
// array, if any.
scoped_ptr& operator=(decltype(nullptr)) {
reset();
return *this;
}
// Deleted copy constructor and copy assignment, to make the type move-only.
scoped_ptr(const scoped_ptr& other) = delete;
scoped_ptr& operator=(const scoped_ptr& other) = delete;
// Get an rvalue reference. (sp.Pass() does the same thing as std::move(sp).)
scoped_ptr&& Pass() { return static_cast<scoped_ptr&&>(*this); }
// Reset. Deletes the currently owned array, if any.
// Then takes ownership of a new object, if given.
void reset(element_type* array = nullptr) { impl_.reset(array); }
// Accessors to get the owned array.
element_type& operator[](size_t i) const {
assert(impl_.get() != nullptr);
return impl_.get()[i];
}
element_type* get() const { return impl_.get(); }
// Access to the deleter.
deleter_type& get_deleter() { return impl_.get_deleter(); }
const deleter_type& get_deleter() const { return impl_.get_deleter(); }
// Allow scoped_ptr<element_type> to be used in boolean expressions, but not
// implicitly convertible to a real bool (which is dangerous).
private:
// Pre-C++11 "safe bool" idiom; see the primary template for details.
typedef rtc::internal::scoped_ptr_impl<element_type, deleter_type>
scoped_ptr::*Testable;
public:
operator Testable() const {
return impl_.get() ? &scoped_ptr::impl_ : nullptr;
}
// Comparison operators.
// These return whether two scoped_ptr refer to the same object, not just to
// two different but equal objects.
bool operator==(element_type* array) const { return impl_.get() == array; }
bool operator!=(element_type* array) const { return impl_.get() != array; }
// Swap two scoped pointers.
void swap(scoped_ptr& p2) {
impl_.swap(p2.impl_);
}
// Release a pointer.
// The return value is the current pointer held by this object. If this object
// holds a nullptr, the return value is nullptr. After this operation, this
// object will hold a nullptr, and will not own the object any more.
element_type* release() WARN_UNUSED_RESULT {
return impl_.release();
}
// Delete the currently held pointer and return a pointer
// to allow overwriting of the current pointer address.
element_type** accept() WARN_UNUSED_RESULT {
return impl_.accept();
}
// Return a pointer to the current pointer address.
element_type** use() WARN_UNUSED_RESULT {
return impl_.use();
}
private:
// Force element_type to be a complete type (sizeof fails on incomplete T).
enum { type_must_be_complete = sizeof(element_type) };
// Actually hold the data.
rtc::internal::scoped_ptr_impl<element_type, deleter_type> impl_;
// Disable initialization from any type other than element_type*, by
// providing a constructor that matches such an initialization, but is
// private and has no definition. This is disabled because it is not safe to
// call delete[] on an array whose static type does not match its dynamic
// type.
template <typename U> explicit scoped_ptr(U* array);
explicit scoped_ptr(int disallow_construction_from_null);
// Disable reset() from any type other than element_type*, for the same
// reasons as the constructor above.
template <typename U> void reset(U* array);
void reset(int disallow_reset_from_null);
// Forbid comparison of scoped_ptr types. If U != T, it totally
// doesn't make sense, and if U == T, it still doesn't make sense
// because you should never have the same object owned by two different
// scoped_ptrs.
// Deliberately declared but never defined: any use is a link-time error.
template <class U> bool operator==(scoped_ptr<U> const& p2) const;
template <class U> bool operator!=(scoped_ptr<U> const& p2) const;
};
// Non-member swap, mirroring std::swap; forwards to the member swap which
// exchanges both the stored pointer and the deleter.
template <class T, class D>
void swap(rtc::scoped_ptr<T, D>& p1, rtc::scoped_ptr<T, D>& p2) {
p1.swap(p2);
}
} // namespace rtc
// Symmetric comparisons against a raw pointer on the left-hand side
// (the member operators only cover the pointer-on-the-right form).
template <class T, class D>
bool operator==(T* p1, const rtc::scoped_ptr<T, D>& p2) {
return p1 == p2.get();
}
template <class T, class D>
bool operator!=(T* p1, const rtc::scoped_ptr<T, D>& p2) {
return p1 != p2.get();
}
// A function to convert T* into scoped_ptr<T>
// Doing e.g. make_scoped_ptr(new FooBarBaz<type>(arg)) is a shorter notation
// for scoped_ptr<FooBarBaz<type> >(new FooBarBaz<type>(arg))
template <typename T>
rtc::scoped_ptr<T> rtc_make_scoped_ptr(T* ptr) {
// Takes ownership of |ptr|. Unlike std::make_unique, the object is
// constructed by the caller; this is only a wrapping convenience.
return rtc::scoped_ptr<T>(ptr);
}
#endif // #ifndef WEBRTC_BASE_SCOPED_PTR_H__

View File

@ -1,133 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/base/checks.h"
#include "webrtc/base/stringutils.h"
namespace rtc {
bool memory_check(const void* memory, int c, size_t count) {
  // Complement to memset: verify that the first |count| bytes of |memory|
  // all hold the byte value |c|. Vacuously true when count == 0.
  const char* cursor = static_cast<const char*>(memory);
  const char* const end = cursor + count;
  const char expected = static_cast<char>(c);
  while (cursor != end) {
    if (*cursor != expected)
      return false;
    ++cursor;
  }
  return true;
}
// Simple wildcard matcher: alphabetic characters compare
// case-insensitively and '*' matches zero or more characters.
// Ex: string_match("www.TEST.GOOGLE.COM", "www.*.com") -> true
//
// Fix: characters are cast to unsigned char before being passed to
// toupper(). Passing a negative char (possible for bytes >= 0x80 when
// plain char is signed) to toupper() is undefined behavior per the C
// standard; the cast makes the call well-defined for all byte values.
bool string_match(const char* target, const char* pattern) {
  while (*pattern) {
    if (*pattern == '*') {
      if (!*++pattern) {
        return true;  // Trailing '*' matches any remainder of target.
      }
      // Try to match the rest of the pattern at every remaining position.
      while (*target) {
        if ((toupper(static_cast<unsigned char>(*pattern)) ==
             toupper(static_cast<unsigned char>(*target))) &&
            string_match(target + 1, pattern + 1)) {
          return true;
        }
        ++target;
      }
      return false;
    } else {
      if (toupper(static_cast<unsigned char>(*pattern)) !=
          toupper(static_cast<unsigned char>(*target))) {
        return false;
      }
      ++target;
      ++pattern;
    }
  }
  // Pattern exhausted; match only if the target is exhausted too.
  return !*target;
}
#if defined(WEBRTC_WIN)
// Lexicographically compare wide string |s1| against ASCII string |s2|,
// examining at most |n| characters. Each character is passed through
// |transformation| (identity or tolowercase) before comparison.
// Returns <0, 0 or >0 in the manner of strcmp.
int ascii_string_compare(const wchar_t* s1, const char* s2, size_t n,
CharacterTransformation transformation) {
wchar_t c1, c2;
while (true) {
if (n-- == 0) return 0;
c1 = transformation(*s1);
// Double check that characters are not UTF-8
RTC_DCHECK_LT(static_cast<unsigned char>(*s2), 128);
// Note: *s2 gets implicitly promoted to wchar_t
c2 = transformation(*s2);
if (c1 != c2) return (c1 < c2) ? -1 : 1;
if (!c1) return 0;
++s1;
++s2;
}
}
// Copy up to |srclen| ASCII characters from narrow |source| into the wide
// |buffer| of capacity |buflen| (including the terminator). Always
// NUL-terminates when buflen > 0. Returns the number of characters copied.
size_t asccpyn(wchar_t* buffer, size_t buflen,
const char* source, size_t srclen) {
if (buflen <= 0)
return 0;
if (srclen == SIZE_UNKNOWN) {
srclen = strlenn(source, buflen - 1);
} else if (srclen >= buflen) {
// Truncate to leave room for the terminating NUL.
srclen = buflen - 1;
}
#if !defined(NDEBUG)
// Double check that characters are not UTF-8
for (size_t pos = 0; pos < srclen; ++pos)
RTC_DCHECK_LT(static_cast<unsigned char>(source[pos]), 128);
#endif
std::copy(source, source + srclen, buffer);
buffer[srclen] = 0;
return srclen;
}
#endif // WEBRTC_WIN
// Replace every occurrence of |search| (length |search_len|) in |*s| with
// |replace| (length |replace_len|), scanning left to right. The scan
// resumes immediately after each inserted replacement, so replacement
// text is never itself re-matched.
void replace_substrs(const char *search,
                     size_t search_len,
                     const char *replace,
                     size_t replace_len,
                     std::string *s) {
  size_t cursor = s->find(search, 0, search_len);
  while (cursor != std::string::npos) {
    s->replace(cursor, search_len, replace, replace_len);
    cursor = s->find(search, cursor + replace_len, search_len);
  }
}
// True iff |s1| begins with |s2|. strncmp stops at a terminating NUL in
// either argument, so an |s1| shorter than |s2| is handled safely.
bool starts_with(const char *s1, const char *s2) {
  const size_t prefix_len = strlen(s2);
  return strncmp(s1, s2, prefix_len) == 0;
}
// True iff |s1| ends with |s2|. A suffix longer than the whole string can
// never match; otherwise compare |s2| against the tail of |s1|.
bool ends_with(const char *s1, const char *s2) {
  const size_t len1 = strlen(s1);
  const size_t len2 = strlen(s2);
  if (len2 > len1)
    return false;
  // Both spans are exactly len2 characters with no embedded NULs, so a
  // byte-wise comparison is equivalent to strncmp here.
  return memcmp(s1 + (len1 - len2), s2, len2) == 0;
}
// Characters that string_trim() treats as trimmable whitespace.
static const char kWhitespace[] = " \n\r\t";
// Return |s| with leading and trailing whitespace removed; an empty or
// all-whitespace input yields the empty string.
std::string string_trim(const std::string& s) {
  const std::string::size_type first = s.find_first_not_of(kWhitespace);
  if (first == std::string::npos)
    return std::string();
  // A non-whitespace character exists, so find_last_not_of cannot fail.
  const std::string::size_type last = s.find_last_not_of(kWhitespace);
  return s.substr(first, last - first + 1);
}
} // namespace rtc

View File

@ -1,318 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_STRINGUTILS_H__
#define WEBRTC_BASE_STRINGUTILS_H__
#include <ctype.h>
#include <stdarg.h>
#include <stdio.h>
#include <string.h>
#if defined(WEBRTC_WIN)
#include <malloc.h>
#include <wchar.h>
#define alloca _alloca
#endif // WEBRTC_WIN
#if defined(WEBRTC_POSIX)
#ifdef BSD
#include <stdlib.h>
#else // BSD
#include <alloca.h>
#endif // !BSD
#endif // WEBRTC_POSIX
#include <string>
#include "webrtc/base/basictypes.h"
///////////////////////////////////////////////////////////////////////////////
// Generic string/memory utilities
///////////////////////////////////////////////////////////////////////////////
// Allocate a LEN-element array of TYPE on the caller's stack frame via
// alloca(); the storage is released when the calling function returns
// (not at end of scope). Use with care for large or variable LEN.
#define STACK_ARRAY(TYPE, LEN) static_cast<TYPE*>(::alloca((LEN)*sizeof(TYPE)))
namespace rtc {
// Complement to memset. Verifies memory consists of count bytes of value c.
bool memory_check(const void* memory, int c, size_t count);
// Determines whether the simple wildcard pattern matches target.
// Alpha characters in pattern match case-insensitively.
// Asterisks in pattern match 0 or more characters.
// Ex: string_match("www.TEST.GOOGLE.COM", "www.*.com") -> true
bool string_match(const char* target, const char* pattern);
} // namespace rtc
///////////////////////////////////////////////////////////////////////////////
// Rename a bunch of common string functions so they are consistent across
// platforms and between char and wchar_t variants.
// Here is the full list of functions that are unified:
// strlen, strcmp, stricmp, strncmp, strnicmp
// strchr, vsnprintf, strtoul, tolowercase
// tolowercase is like tolower, but not compatible with end-of-file value
//
// It's not clear if we will ever use wchar_t strings on unix. In theory,
// all strings should be Utf8 all the time, except when interfacing with Win32
// APIs that require Utf16.
///////////////////////////////////////////////////////////////////////////////
// Like tolower(), but usable where the argument may be EOF-incompatible.
// Fix: cast through unsigned char before calling tolower() — passing a
// negative char (bytes >= 0x80 when plain char is signed) is undefined
// behavior per the C standard.
inline char tolowercase(char c) {
  return static_cast<char>(tolower(static_cast<unsigned char>(c)));
}
#if defined(WEBRTC_WIN)
// Wide-character overloads so generic code can call the same names for
// char and wchar_t strings on Windows. Each simply forwards to the
// corresponding wcs*/MSVC function.
inline size_t strlen(const wchar_t* s) {
return wcslen(s);
}
inline int strcmp(const wchar_t* s1, const wchar_t* s2) {
return wcscmp(s1, s2);
}
// Case-insensitive compare (MSVC _wcsicmp).
inline int stricmp(const wchar_t* s1, const wchar_t* s2) {
return _wcsicmp(s1, s2);
}
inline int strncmp(const wchar_t* s1, const wchar_t* s2, size_t n) {
return wcsncmp(s1, s2, n);
}
// Bounded case-insensitive compare (MSVC _wcsnicmp).
inline int strnicmp(const wchar_t* s1, const wchar_t* s2, size_t n) {
return _wcsnicmp(s1, s2, n);
}
inline const wchar_t* strchr(const wchar_t* s, wchar_t c) {
return wcschr(s, c);
}
inline const wchar_t* strstr(const wchar_t* haystack, const wchar_t* needle) {
return wcsstr(haystack, needle);
}
// Guarded because some toolchains define vsnprintf as a macro.
#ifndef vsnprintf
inline int vsnprintf(wchar_t* buf, size_t n, const wchar_t* fmt, va_list args) {
return _vsnwprintf(buf, n, fmt, args);
}
#endif // !vsnprintf
inline unsigned long strtoul(const wchar_t* snum, wchar_t** end, int base) {
return wcstoul(snum, end, base);
}
inline wchar_t tolowercase(wchar_t c) {
return static_cast<wchar_t>(towlower(c));
}
#endif // WEBRTC_WIN
#if defined(WEBRTC_POSIX)
// Provide the MSVC spellings of the POSIX case-insensitive compares so
// shared code can use one name on every platform.
inline int _stricmp(const char* s1, const char* s2) {
return strcasecmp(s1, s2);
}
inline int _strnicmp(const char* s1, const char* s2, size_t n) {
return strncasecmp(s1, s2, n);
}
#endif // WEBRTC_POSIX
///////////////////////////////////////////////////////////////////////////////
// Traits simplifies porting string functions to be CTYPE-agnostic
///////////////////////////////////////////////////////////////////////////////
namespace rtc {
// Sentinel length meaning "NUL-terminated; compute the length yourself".
const size_t SIZE_UNKNOWN = static_cast<size_t>(-1);
// Primary template, specialized below for char (and wchar_t on Windows).
template<class CTYPE>
struct Traits {
// STL string type
//typedef XXX string;
// Null-terminated string
//inline static const CTYPE* empty_str();
};
///////////////////////////////////////////////////////////////////////////////
// String utilities which work with char or wchar_t
///////////////////////////////////////////////////////////////////////////////
// Return |str| unless it is null; otherwise fall back to |def_str|, and
// finally to the empty string for this character type. Never returns null.
template<class CTYPE>
inline const CTYPE* nonnull(const CTYPE* str, const CTYPE* def_str = NULL) {
  if (str)
    return str;
  return def_str ? def_str : Traits<CTYPE>::empty_str();
}
// Return a pointer to the first character of |str| that also appears in
// the set |chs|, or 0 if none does (a strpbrk generalized over CTYPE).
template<class CTYPE>
const CTYPE* strchr(const CTYPE* str, const CTYPE* chs) {
  for (const CTYPE* p = str; *p; ++p) {
    for (const CTYPE* c = chs; *c; ++c) {
      if (*p == *c)
        return p;
    }
  }
  return 0;
}
// Bounded strchr: search for |ch| in at most the first |slen| characters
// of |str|, stopping early at a NUL. Returns 0 when not found.
template<class CTYPE>
const CTYPE* strchrn(const CTYPE* str, size_t slen, CTYPE ch) {
  for (size_t pos = 0; pos < slen; ++pos) {
    if (!str[pos])
      break;
    if (str[pos] == ch)
      return str + pos;
  }
  return 0;
}
// Length of the NUL-terminated string in |buffer|, capped at |buflen|.
// Fix: the bounds check is performed BEFORE the dereference. The previous
// ordering (buffer[bufpos] && bufpos < buflen) read buffer[buflen] — one
// element past the permitted window — whenever the string filled it.
// Results are unchanged for all in-bounds inputs.
template<class CTYPE>
size_t strlenn(const CTYPE* buffer, size_t buflen) {
  size_t bufpos = 0;
  while (bufpos < buflen && buffer[bufpos]) {
    ++bufpos;
  }
  return bufpos;
}
// Safe versions of strncpy, strncat, snprintf and vsnprintf that always
// null-terminate.
// Safe strncpy replacement: copy at most buflen - 1 characters of |source|
// into |buffer| and always NUL-terminate (when buflen > 0). Pass
// SIZE_UNKNOWN as |srclen| to copy up to the source's own terminator.
// Returns the number of characters copied, excluding the terminator.
template<class CTYPE>
size_t strcpyn(CTYPE* buffer, size_t buflen,
               const CTYPE* source, size_t srclen = SIZE_UNKNOWN) {
  if (buflen == 0)
    return 0;
  const size_t space = buflen - 1;  // Room available for characters.
  size_t count = srclen;
  if (count == SIZE_UNKNOWN)
    count = strlenn(source, space);
  else if (count > space)
    count = space;
  memcpy(buffer, source, count * sizeof(CTYPE));
  buffer[count] = 0;
  return count;
}
// Safe strncat replacement: append |source| to the NUL-terminated string
// already in |buffer| (total capacity |buflen|), truncating as needed and
// always NUL-terminating. Returns the length of the combined string.
template<class CTYPE>
size_t strcatn(CTYPE* buffer, size_t buflen,
const CTYPE* source, size_t srclen = SIZE_UNKNOWN) {
if (buflen <= 0)
return 0;
// Find the end of the existing content, then copy into the remainder.
size_t bufpos = strlenn(buffer, buflen - 1);
return bufpos + strcpyn(buffer + bufpos, buflen - bufpos, source, srclen);
}
// Some compilers (clang specifically) require vsprintfn be defined before
// sprintfn.
// vsnprintf wrapper that always NUL-terminates. If vsnprintf reports an
// error (len < 0) or truncation (len >= buflen), the result is clamped to
// buflen - 1 characters and that clamped length is returned.
template<class CTYPE>
size_t vsprintfn(CTYPE* buffer, size_t buflen, const CTYPE* format,
va_list args) {
int len = vsnprintf(buffer, buflen, format, args);
if ((len < 0) || (static_cast<size_t>(len) >= buflen)) {
len = static_cast<int>(buflen - 1);
buffer[len] = 0;
}
return len;
}
// snprintf replacement that always NUL-terminates; forwards to vsprintfn.
// (Forward declaration kept for compilers that require it — see the
// comment above about clang.)
template<class CTYPE>
size_t sprintfn(CTYPE* buffer, size_t buflen, const CTYPE* format, ...);
template<class CTYPE>
size_t sprintfn(CTYPE* buffer, size_t buflen, const CTYPE* format, ...) {
va_list args;
va_start(args, format);
size_t len = vsprintfn(buffer, buflen, format, args);
va_end(args);
return len;
}
///////////////////////////////////////////////////////////////////////////////
// Allow safe comparing and copying ascii (not UTF-8) with both wide and
// non-wide character strings.
///////////////////////////////////////////////////////////////////////////////
// Narrow/narrow forms of the ascii comparison helpers; these simply
// forward to the C string functions (asc* names exist so the same
// spelling works for the wide/narrow mixed overloads below).
inline int asccmp(const char* s1, const char* s2) {
return strcmp(s1, s2);
}
inline int ascicmp(const char* s1, const char* s2) {
return _stricmp(s1, s2);
}
inline int ascncmp(const char* s1, const char* s2, size_t n) {
return strncmp(s1, s2, n);
}
inline int ascnicmp(const char* s1, const char* s2, size_t n) {
return _strnicmp(s1, s2, n);
}
inline size_t asccpyn(char* buffer, size_t buflen,
const char* source, size_t srclen = SIZE_UNKNOWN) {
return strcpyn(buffer, buflen, source, srclen);
}
#if defined(WEBRTC_WIN)
// Per-character transform applied before comparing (identity for
// case-sensitive, tolowercase for case-insensitive comparisons).
typedef wchar_t(*CharacterTransformation)(wchar_t);
inline wchar_t identity(wchar_t c) { return c; }
int ascii_string_compare(const wchar_t* s1, const char* s2, size_t n,
CharacterTransformation transformation);
// Wide-vs-narrow comparisons; static_cast<size_t>(-1) means "no limit".
inline int asccmp(const wchar_t* s1, const char* s2) {
return ascii_string_compare(s1, s2, static_cast<size_t>(-1), identity);
}
inline int ascicmp(const wchar_t* s1, const char* s2) {
return ascii_string_compare(s1, s2, static_cast<size_t>(-1), tolowercase);
}
inline int ascncmp(const wchar_t* s1, const char* s2, size_t n) {
return ascii_string_compare(s1, s2, n, identity);
}
inline int ascnicmp(const wchar_t* s1, const char* s2, size_t n) {
return ascii_string_compare(s1, s2, n, tolowercase);
}
size_t asccpyn(wchar_t* buffer, size_t buflen,
const char* source, size_t srclen = SIZE_UNKNOWN);
#endif // WEBRTC_WIN
///////////////////////////////////////////////////////////////////////////////
// Traits<char> specializations
///////////////////////////////////////////////////////////////////////////////
// Traits for narrow (char) strings.
template<>
struct Traits<char> {
typedef std::string string;
inline static const char* empty_str() { return ""; }
};
///////////////////////////////////////////////////////////////////////////////
// Traits<wchar_t> specializations (Windows only, currently)
///////////////////////////////////////////////////////////////////////////////
#if defined(WEBRTC_WIN)
// Traits for wide (wchar_t) strings.
template<>
struct Traits<wchar_t> {
typedef std::wstring string;
inline static const wchar_t* empty_str() { return L""; }
};
#endif // WEBRTC_WIN
// Replaces all occurrences of "search" with "replace". Mutates |*s| in
// place; lengths are explicit so embedded NULs are supported.
void replace_substrs(const char *search,
size_t search_len,
const char *replace,
size_t replace_len,
std::string *s);
// True iff s1 starts with s2.
bool starts_with(const char *s1, const char *s2);
// True iff s1 ends with s2.
bool ends_with(const char *s1, const char *s2);
// Remove leading and trailing whitespaces (" \n\r\t").
std::string string_trim(const std::string& s);
} // namespace rtc
#endif // WEBRTC_BASE_STRINGUTILS_H__

View File

@ -1,127 +0,0 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Borrowed from Chromium's src/base/template_util.h.
#ifndef WEBRTC_BASE_TEMPLATE_UTIL_H_
#define WEBRTC_BASE_TEMPLATE_UTIL_H_
#include <stddef.h> // For size_t.
namespace rtc {
// Template definitions from tr1.
// Minimal tr1-style reimplementation of std::integral_constant.
template<class T, T v>
struct integral_constant {
static const T value = v;
typedef T value_type;
typedef integral_constant<T, v> type;
};
// Out-of-class definition so |value| can be ODR-used (pre-C++17 rules).
template <class T, T v> const T integral_constant<T, v>::value;
typedef integral_constant<bool, true> true_type;
typedef integral_constant<bool, false> false_type;
// is_pointer<T>::value: true iff T is a pointer type.
template <class T> struct is_pointer : false_type {};
template <class T> struct is_pointer<T*> : true_type {};
// is_same<T, U>::value: true iff T and U are the same type.
template <class T, class U> struct is_same : public false_type {};
template <class T> struct is_same<T, T> : true_type {};
// is_array<T>::value: true for both bounded T[n] and unbounded T[].
template<class> struct is_array : public false_type {};
template<class T, size_t n> struct is_array<T[n]> : public true_type {};
template<class T> struct is_array<T[]> : public true_type {};
// is_non_const_reference<T>::value: true only for T& (not const T&).
template <class T> struct is_non_const_reference : false_type {};
template <class T> struct is_non_const_reference<T&> : true_type {};
template <class T> struct is_non_const_reference<const T&> : false_type {};
// is_void<T>::value: true iff T is (unqualified) void.
template <class T> struct is_void : false_type {};
template <> struct is_void<void> : true_type {};
// remove_reference<T>::type: strips lvalue and rvalue reference.
template <class T>
struct remove_reference {
typedef T type;
};
template <class T>
struct remove_reference<T&> {
typedef T type;
};
template <class T>
struct remove_reference<T&&> {
typedef T type;
};
namespace internal {
// Types YesType and NoType are guaranteed such that sizeof(YesType) <
// sizeof(NoType).
typedef char YesType;
struct NoType {
YesType dummy[2];
};
// This class is an implementation detail for is_convertible, and you
// don't need to know how it works to use is_convertible. For those
// who care: we declare two different functions, one whose argument is
// of type To and one with a variadic argument list. We give them
// return types of different size, so we can use sizeof to trick the
// compiler into telling us which function it would have chosen if we
// had called it with an argument of type From. See Alexandrescu's
// _Modern C++ Design_ for more details on this sort of trick.
struct ConvertHelper {
template <typename To>
static YesType Test(To);
template <typename To>
static NoType Test(...);
// Declared-only factory: used inside unevaluated sizeof() expressions.
template <typename From>
static From& Create();
};
// Used to determine if a type is a struct/union/class. Inspired by Boost's
// is_class type_trait implementation.
struct IsClassHelper {
// Only class types can have a pointer-to-member-function, so this
// overload is viable (and chosen) exactly for classes.
template <typename C>
static YesType Test(void(C::*)(void));
template <typename C>
static NoType Test(...);
};
} // namespace internal
// Inherits from true_type if From is convertible to To, false_type otherwise.
//
// Note that if the type is convertible, this will be a true_type REGARDLESS
// of whether or not the conversion would emit a warning.
template <typename From, typename To>
struct is_convertible
: integral_constant<bool,
sizeof(internal::ConvertHelper::Test<To>(
internal::ConvertHelper::Create<From>())) ==
sizeof(internal::YesType)> {
};
// is_class<T>::value: true for class/struct/union types, detected via the
// pointer-to-member overload trick in IsClassHelper.
template <typename T>
struct is_class
: integral_constant<bool,
sizeof(internal::IsClassHelper::Test<T>(0)) ==
sizeof(internal::YesType)> {
};
} // namespace rtc
#endif // WEBRTC_BASE_TEMPLATE_UTIL_H_

View File

@ -1,99 +0,0 @@
//
// Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the LICENSE file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
//
// Borrowed from
// https://code.google.com/p/gperftools/source/browse/src/base/thread_annotations.h
// but adapted for clang attributes instead of the gcc.
//
// This header file contains the macro definitions for thread safety
// annotations that allow the developers to document the locking policies
// of their multi-threaded code. The annotations can also help program
// analysis tools to identify potential thread safety issues.
#ifndef BASE_THREAD_ANNOTATIONS_H_
#define BASE_THREAD_ANNOTATIONS_H_
// Use clang's thread-safety attribute syntax when available; on other
// compilers (and under SWIG) every annotation expands to nothing.
#if defined(__clang__) && (!defined(SWIG))
#define THREAD_ANNOTATION_ATTRIBUTE__(x) __attribute__((x))
#else
#define THREAD_ANNOTATION_ATTRIBUTE__(x) // no-op
#endif
// Document if a shared variable/field needs to be protected by a lock.
// GUARDED_BY allows the user to specify a particular lock that should be
// held when accessing the annotated variable, while GUARDED_VAR only
// indicates a shared variable should be guarded (by any lock). GUARDED_VAR
// is primarily used when the client cannot express the name of the lock.
#define GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(guarded_by(x))
#define GUARDED_VAR THREAD_ANNOTATION_ATTRIBUTE__(guarded)
// Document if the memory location pointed to by a pointer should be guarded
// by a lock when dereferencing the pointer. Similar to GUARDED_VAR,
// PT_GUARDED_VAR is primarily used when the client cannot express the name
// of the lock. Note that a pointer variable to a shared memory location
// could itself be a shared variable. For example, if a shared global pointer
// q, which is guarded by mu1, points to a shared memory location that is
// guarded by mu2, q should be annotated as follows:
// int *q GUARDED_BY(mu1) PT_GUARDED_BY(mu2);
#define PT_GUARDED_BY(x) THREAD_ANNOTATION_ATTRIBUTE__(point_to_guarded_by(x))
#define PT_GUARDED_VAR THREAD_ANNOTATION_ATTRIBUTE__(point_to_guarded)
// Document the acquisition order between locks that can be held
// simultaneously by a thread. For any two locks that need to be annotated
// to establish an acquisition order, only one of them needs the annotation.
// (i.e. You don't have to annotate both locks with both ACQUIRED_AFTER
// and ACQUIRED_BEFORE.)
#define ACQUIRED_AFTER(x) THREAD_ANNOTATION_ATTRIBUTE__(acquired_after(x))
#define ACQUIRED_BEFORE(x) THREAD_ANNOTATION_ATTRIBUTE__(acquired_before(x))
// The following three annotations document the lock requirements for
// functions/methods.
// Document if a function expects certain locks to be held before it is called
#define EXCLUSIVE_LOCKS_REQUIRED(...) \
THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(__VA_ARGS__))
#define SHARED_LOCKS_REQUIRED(...) \
THREAD_ANNOTATION_ATTRIBUTE__(shared_locks_required(__VA_ARGS__))
// Document the locks acquired in the body of the function. These locks
// cannot be held when calling this function (as google3's Mutex locks are
// non-reentrant).
#define LOCKS_EXCLUDED(x) THREAD_ANNOTATION_ATTRIBUTE__(locks_excluded(x))
// Document the lock the annotated function returns without acquiring it.
#define LOCK_RETURNED(x) THREAD_ANNOTATION_ATTRIBUTE__(lock_returned(x))
// Document if a class/type is a lockable type (such as the Mutex class).
#define LOCKABLE THREAD_ANNOTATION_ATTRIBUTE__(lockable)
// Document if a class is a scoped lockable type (such as the MutexLock class).
#define SCOPED_LOCKABLE THREAD_ANNOTATION_ATTRIBUTE__(scoped_lockable)
// The following annotations specify lock and unlock primitives.
#define EXCLUSIVE_LOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(exclusive_lock_function(__VA_ARGS__))
#define SHARED_LOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(shared_lock_function(__VA_ARGS__))
// *_TRYLOCK_FUNCTION annotations take the success return value first,
// then (optionally) the locks acquired on success.
#define EXCLUSIVE_TRYLOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(exclusive_trylock_function(__VA_ARGS__))
#define SHARED_TRYLOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(shared_trylock_function(__VA_ARGS__))
#define UNLOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(unlock_function(__VA_ARGS__))
// An escape hatch for thread safety analysis to ignore the annotated function.
#define NO_THREAD_SAFETY_ANALYSIS \
THREAD_ANNOTATION_ATTRIBUTE__(no_thread_safety_analysis)
#endif // BASE_THREAD_ANNOTATIONS_H_

View File

@ -1,91 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Borrowed from Chromium's src/base/threading/thread_checker.h.
#ifndef WEBRTC_BASE_THREAD_CHECKER_H_
#define WEBRTC_BASE_THREAD_CHECKER_H_
// Apart from debug builds, we also enable the thread checker in
// builds with DCHECK_ALWAYS_ON so that trybots and waterfall bots
// with this define will get the same level of thread checking as
// debug bots.
//
// Note that this does not perfectly match situations where RTC_DCHECK is
// enabled. For example a non-official release build may have
// DCHECK_ALWAYS_ON undefined (and therefore ThreadChecker would be
// disabled) but have RTC_DCHECKs enabled at runtime.
#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
#define ENABLE_THREAD_CHECKER 1
#else
#define ENABLE_THREAD_CHECKER 0
#endif
#include "webrtc/base/thread_checker_impl.h"
namespace rtc {
// Do nothing implementation, for use in release mode.
//
// Note: You should almost always use the ThreadChecker class to get the
// right version for your build configuration.
// Release-mode stand-in for ThreadChecker: tracks no thread affinity at all.
//
// Note: You should almost always use the ThreadChecker class to get the
// right version for your build configuration.
class ThreadCheckerDoNothing {
 public:
  // Always reports success; there is no bound thread to compare against.
  bool CalledOnValidThread() const { return true; }
  // Nothing is tracked, so there is nothing to detach from.
  void DetachFromThread() {}
};
// ThreadChecker is a helper class used to help verify that some methods of a
// class are called from the same thread. It provides identical functionality to
// base::NonThreadSafe, but it is meant to be held as a member variable, rather
// than inherited from base::NonThreadSafe.
//
// While inheriting from base::NonThreadSafe may give a clear indication about
// the thread-safety of a class, it may also lead to violations of the style
// guide with regard to multiple inheritance. The choice between having a
// ThreadChecker member and inheriting from base::NonThreadSafe should be based
// on whether:
// - Derived classes need to know the thread they belong to, as opposed to
// having that functionality fully encapsulated in the base class.
// - Derived classes should be able to reassign the base class to another
// thread, via DetachFromThread.
//
// If neither of these are true, then having a ThreadChecker member and calling
// CalledOnValidThread is the preferable solution.
//
// Example:
// class MyClass {
// public:
// void Foo() {
// RTC_DCHECK(thread_checker_.CalledOnValidThread());
// ... (do stuff) ...
// }
//
// private:
// ThreadChecker thread_checker_;
// }
//
// In Release mode, CalledOnValidThread will always return true.
// Compile-time selection of the checker implementation: the real
// ThreadCheckerImpl when checking is enabled, the no-op version otherwise.
// Both bases expose the same CalledOnValidThread()/DetachFromThread()
// interface, so callers are unaffected by the build configuration.
#if ENABLE_THREAD_CHECKER
class ThreadChecker : public ThreadCheckerImpl {
};
#else
class ThreadChecker : public ThreadCheckerDoNothing {
};
#endif  // ENABLE_THREAD_CHECKER
#undef ENABLE_THREAD_CHECKER
} // namespace rtc
#endif // WEBRTC_BASE_THREAD_CHECKER_H_

View File

@ -1,36 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Borrowed from Chromium's src/base/threading/thread_checker_impl.cc.
#include "webrtc/base/thread_checker_impl.h"
namespace rtc {
// Binds the checker to the thread it is constructed on;
// CalledOnValidThread() compares later callers against this reference.
ThreadCheckerImpl::ThreadCheckerImpl() : valid_thread_(CurrentThreadRef()) {
}
ThreadCheckerImpl::~ThreadCheckerImpl() {
}
// Returns true iff the calling thread is the one the checker is bound to.
// If the checker was previously detached (valid_thread_ == 0), it re-binds
// to the calling thread, so the first call after a detach always succeeds.
bool ThreadCheckerImpl::CalledOnValidThread() const {
  const PlatformThreadRef current_thread = CurrentThreadRef();
  CritScope scoped_lock(&lock_);  // |valid_thread_| is guarded by |lock_|.
  if (!valid_thread_)  // Set if previously detached.
    valid_thread_ = current_thread;
  return IsThreadRefEqual(valid_thread_, current_thread);
}
// Unbinds the checker: the next CalledOnValidThread() call will re-bind it
// to whichever thread makes that call.
void ThreadCheckerImpl::DetachFromThread() {
  CritScope scoped_lock(&lock_);
  valid_thread_ = 0;  // 0 acts as the "detached" sentinel.
}
} // namespace rtc

View File

@ -1,48 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Borrowed from Chromium's src/base/threading/thread_checker_impl.h.
#ifndef WEBRTC_BASE_THREAD_CHECKER_IMPL_H_
#define WEBRTC_BASE_THREAD_CHECKER_IMPL_H_
#include "webrtc/base/criticalsection.h"
#include "webrtc/base/platform_thread.h"
namespace rtc {
// Real implementation of ThreadChecker, for use in debug mode, or
// for temporary use in release mode (e.g. to RTC_CHECK on a threading issue
// seen only in the wild).
//
// Note: You should almost always use the ThreadChecker class to get the
// right version for your build configuration.
class ThreadCheckerImpl {
 public:
  // Binds the checker to the constructing thread.
  ThreadCheckerImpl();
  ~ThreadCheckerImpl();
  // Returns true if the calling thread matches the bound thread; if the
  // checker was detached, re-binds it to the calling thread and succeeds.
  bool CalledOnValidThread() const;
  // Changes the thread that is checked for in CalledOnValidThread.  This may
  // be useful when an object may be created on one thread and then used
  // exclusively on another thread.
  void DetachFromThread();
 private:
  mutable CriticalSection lock_;
  // This is mutable so that CalledOnValidThread can set it.
  // It's guarded by |lock_|.  A value of 0 means "detached".
  mutable PlatformThreadRef valid_thread_;
};
} // namespace rtc
#endif // WEBRTC_BASE_THREAD_CHECKER_IMPL_H_

View File

@ -1,103 +0,0 @@
/*
* Copyright 2004 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_BASE_WIN32_H_
#define WEBRTC_BASE_WIN32_H_
#if defined(WEBRTC_WIN)
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
// Make sure we don't get min/max macros
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <winsock2.h>
#include <windows.h>
#ifndef SECURITY_MANDATORY_LABEL_AUTHORITY
// Add defines that we use if we are compiling against older sdks
#define SECURITY_MANDATORY_MEDIUM_RID (0x00002000L)
#define TokenIntegrityLevel static_cast<TOKEN_INFORMATION_CLASS>(0x19)
typedef struct _TOKEN_MANDATORY_LABEL {
SID_AND_ATTRIBUTES Label;
} TOKEN_MANDATORY_LABEL, *PTOKEN_MANDATORY_LABEL;
#endif // SECURITY_MANDATORY_LABEL_AUTHORITY
#undef SetPort
#include <string>
#include "webrtc/base/stringutils.h"
#include "webrtc/base/basictypes.h"
namespace rtc {
const char* win32_inet_ntop(int af, const void *src, char* dst, socklen_t size);
int win32_inet_pton(int af, const char* src, void *dst);
// Converts |len| bytes of UTF-8 at |utf8| into a UTF-16 std::wstring.
// Uses the two-pass MultiByteToWideChar idiom: first call sizes the output,
// second call converts into a stack-allocated buffer.
inline std::wstring ToUtf16(const char* utf8, size_t len) {
  int len16 = ::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len),
                                    NULL, 0);
  wchar_t* ws = STACK_ARRAY(wchar_t, len16);
  ::MultiByteToWideChar(CP_UTF8, 0, utf8, static_cast<int>(len), ws, len16);
  return std::wstring(ws, len16);
}
// Convenience overload for std::string input.
inline std::wstring ToUtf16(const std::string& str) {
  return ToUtf16(str.data(), str.length());
}
// Converts |len| UTF-16 code units at |wide| into a UTF-8 std::string,
// using the same size-then-convert WideCharToMultiByte idiom as ToUtf16.
inline std::string ToUtf8(const wchar_t* wide, size_t len) {
  int len8 = ::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len),
                                   NULL, 0, NULL, NULL);
  char* ns = STACK_ARRAY(char, len8);
  ::WideCharToMultiByte(CP_UTF8, 0, wide, static_cast<int>(len), ns, len8,
                        NULL, NULL);
  return std::string(ns, len8);
}
// Convenience overload for NUL-terminated input.
inline std::string ToUtf8(const wchar_t* wide) {
  return ToUtf8(wide, wcslen(wide));
}
// Convenience overload for std::wstring input.
inline std::string ToUtf8(const std::wstring& wstr) {
  return ToUtf8(wstr.data(), wstr.length());
}
// Convert FILETIME to time_t
void FileTimeToUnixTime(const FILETIME& ft, time_t* ut);
// Convert time_t to FILETIME
void UnixTimeToFileTime(const time_t& ut, FILETIME * ft);
// Convert a Utf8 path representation to a non-length-limited Unicode pathname.
bool Utf8ToWindowsFilename(const std::string& utf8, std::wstring* filename);
// Convert a FILETIME to a UInt64
// Packs a FILETIME's two 32-bit halves into one 64-bit value by routing
// them through ULARGE_INTEGER, avoiding misaligned access on the struct.
inline uint64_t ToUInt64(const FILETIME& ft) {
  ULARGE_INTEGER r = {{ft.dwLowDateTime, ft.dwHighDateTime}};
  return r.QuadPart;
}
// Major version numbers of the Windows NT 5.x family (2000/XP/2003) and the
// NT 6.x family (Vista and later), as reported by GetOsVersion().
enum WindowsMajorVersions {
  kWindows2000 = 5,
  kWindowsVista = 6,
};
// Retrieves the OS version numbers.  NOTE(review): declaration only —
// presumably NULL out-parameters are permitted, since the predicates below
// pass NULL for fields they do not need; confirm against the .cc file.
bool GetOsVersion(int* major, int* minor, int* build);
// True for Windows Vista (6.0) or any later release.
inline bool IsWindowsVistaOrLater() {
  int major;
  return (GetOsVersion(&major, NULL, NULL) && major >= kWindowsVista);
}
// True for Windows XP (5.1) or any later release.
inline bool IsWindowsXpOrLater() {
  int major, minor;
  return (GetOsVersion(&major, &minor, NULL) &&
          (major >= kWindowsVista ||
           (major == kWindows2000 && minor >= 1)));
}
// True for Windows 8 (6.2) or any later release.
inline bool IsWindows8OrLater() {
  int major, minor;
  return (GetOsVersion(&major, &minor, NULL) &&
          (major > kWindowsVista ||
           (major == kWindowsVista && minor >= 2)));
}
// Determine the current integrity level of the process.
bool GetCurrentProcessIntegrityLevel(int* level);
// True when the process runs below medium integrity, i.e. its mandatory
// label RID is less than SECURITY_MANDATORY_MEDIUM_RID (sandboxed /
// "low integrity" processes under UAC).
inline bool IsCurrentProcessLowIntegrity() {
  int level;
  return (GetCurrentProcessIntegrityLevel(&level) &&
          level < SECURITY_MANDATORY_MEDIUM_RID);
}
bool AdjustCurrentProcessPrivilege(const TCHAR* privilege, bool to_enable);
} // namespace rtc
#endif // WEBRTC_WIN
#endif // WEBRTC_BASE_WIN32_H_

View File

@ -6,37 +6,19 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import("//build/config/arm.gni") import("../webrtc.gni")
import("../build/webrtc.gni")
config("common_audio_config") { visibility = [ ":*" ]
include_dirs = [
"resampler/include",
"signal_processing/include",
"vad/include",
]
}
source_set("common_audio") { rtc_library("common_audio") {
visibility += [ "*" ]
sources = [ sources = [
"audio_converter.cc", "audio_converter.cc",
"audio_converter.h", "audio_converter.h",
"audio_ring_buffer.cc",
"audio_ring_buffer.h",
"audio_util.cc", "audio_util.cc",
"blocker.cc",
"blocker.h",
"channel_buffer.cc", "channel_buffer.cc",
"channel_buffer.h", "channel_buffer.h",
"fft4g.c",
"fft4g.h",
"fir_filter.cc",
"fir_filter.h",
"fir_filter_neon.h",
"fir_filter_sse.h",
"include/audio_util.h", "include/audio_util.h",
"lapped_transform.cc",
"lapped_transform.h",
"real_fourier.cc", "real_fourier.cc",
"real_fourier.h", "real_fourier.h",
"real_fourier_ooura.cc", "real_fourier_ooura.cc",
@ -48,7 +30,77 @@ source_set("common_audio") {
"resampler/push_sinc_resampler.h", "resampler/push_sinc_resampler.h",
"resampler/resampler.cc", "resampler/resampler.cc",
"resampler/sinc_resampler.cc", "resampler/sinc_resampler.cc",
"resampler/sinc_resampler.h", "smoothing_filter.cc",
"smoothing_filter.h",
"vad/include/vad.h",
"vad/vad.cc",
"wav_file.cc",
"wav_file.h",
"wav_header.cc",
"wav_header.h",
"window_generator.cc",
"window_generator.h",
]
deps = [
":common_audio_c",
":sinc_resampler",
"../api:array_view",
"../rtc_base:checks",
"../rtc_base:gtest_prod",
"../rtc_base:rtc_base_approved",
"../rtc_base:sanitizer",
"../rtc_base/memory:aligned_malloc",
"../rtc_base/system:arch",
"../rtc_base/system:file_wrapper",
"../system_wrappers",
"third_party/ooura:fft_size_256",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
if (rtc_build_with_neon) {
deps += [ ":common_audio_neon" ]
}
if (current_cpu == "x86" || current_cpu == "x64") {
deps += [ ":common_audio_sse2" ]
deps += [ ":common_audio_avx2" ]
}
}
rtc_source_set("mock_common_audio") {
visibility += webrtc_default_visibility
testonly = true
sources = [
"mocks/mock_smoothing_filter.h",
"vad/mock/mock_vad.h",
]
deps = [
":common_audio",
"../test:test_support",
]
}
rtc_source_set("common_audio_c_arm_asm") {
sources = []
deps = []
if (current_cpu == "arm") {
sources += [ "signal_processing/complex_bit_reverse_arm.S" ]
if (arm_version >= 7) {
sources += [ "signal_processing/filter_ar_fast_q12_armv7.S" ]
} else {
sources += [ "signal_processing/filter_ar_fast_q12.c" ]
}
deps += [ "../rtc_base/system:asm_defines" ]
}
}
rtc_library("common_audio_c") {
visibility += webrtc_default_visibility
sources = [
"ring_buffer.c", "ring_buffer.c",
"ring_buffer.h", "ring_buffer.h",
"signal_processing/auto_corr_to_refl_coef.c", "signal_processing/auto_corr_to_refl_coef.c",
@ -57,7 +109,6 @@ source_set("common_audio") {
"signal_processing/copy_set_operations.c", "signal_processing/copy_set_operations.c",
"signal_processing/cross_correlation.c", "signal_processing/cross_correlation.c",
"signal_processing/division_operations.c", "signal_processing/division_operations.c",
"signal_processing/dot_product_with_scale.c",
"signal_processing/downsample_fast.c", "signal_processing/downsample_fast.c",
"signal_processing/energy.c", "signal_processing/energy.c",
"signal_processing/filter_ar.c", "signal_processing/filter_ar.c",
@ -68,6 +119,7 @@ source_set("common_audio") {
"signal_processing/include/real_fft.h", "signal_processing/include/real_fft.h",
"signal_processing/include/signal_processing_library.h", "signal_processing/include/signal_processing_library.h",
"signal_processing/include/spl_inl.h", "signal_processing/include/spl_inl.h",
"signal_processing/include/spl_inl_armv7.h",
"signal_processing/levinson_durbin.c", "signal_processing/levinson_durbin.c",
"signal_processing/lpc_to_refl_coef.c", "signal_processing/lpc_to_refl_coef.c",
"signal_processing/min_max_operations.c", "signal_processing/min_max_operations.c",
@ -81,15 +133,12 @@ source_set("common_audio") {
"signal_processing/resample_by_2_internal.h", "signal_processing/resample_by_2_internal.h",
"signal_processing/resample_fractional.c", "signal_processing/resample_fractional.c",
"signal_processing/spl_init.c", "signal_processing/spl_init.c",
"signal_processing/spl_inl.c",
"signal_processing/spl_sqrt.c", "signal_processing/spl_sqrt.c",
"signal_processing/splitting_filter.c", "signal_processing/splitting_filter.c",
"signal_processing/sqrt_of_one_minus_x_squared.c", "signal_processing/sqrt_of_one_minus_x_squared.c",
"signal_processing/vector_scaling_operations.c", "signal_processing/vector_scaling_operations.c",
"sparse_fir_filter.cc",
"sparse_fir_filter.h",
"vad/include/vad.h",
"vad/include/webrtc_vad.h", "vad/include/webrtc_vad.h",
"vad/vad.cc",
"vad/vad_core.c", "vad/vad_core.c",
"vad/vad_core.h", "vad/vad_core.h",
"vad/vad_filterbank.c", "vad/vad_filterbank.c",
@ -99,47 +148,8 @@ source_set("common_audio") {
"vad/vad_sp.c", "vad/vad_sp.c",
"vad/vad_sp.h", "vad/vad_sp.h",
"vad/webrtc_vad.c", "vad/webrtc_vad.c",
"wav_file.cc",
"wav_file.h",
"wav_header.cc",
"wav_header.h",
"window_generator.cc",
"window_generator.h",
] ]
deps = [
"../system_wrappers",
]
defines = []
if (rtc_use_openmax_dl) {
sources += [
"real_fourier_openmax.cc",
"real_fourier_openmax.h",
]
defines += [ "RTC_USE_OPENMAX_DL" ]
if (rtc_build_openmax_dl) {
deps += [ "//third_party/openmax_dl/dl" ]
}
}
if (current_cpu == "arm") {
sources += [
"signal_processing/complex_bit_reverse_arm.S",
"signal_processing/spl_sqrt_floor_arm.S",
]
if (arm_version >= 7) {
sources += [ "signal_processing/filter_ar_fast_q12_armv7.S" ]
} else {
sources += [ "signal_processing/filter_ar_fast_q12.c" ]
}
}
if (rtc_build_with_neon) {
deps += [ ":common_audio_neon" ]
}
if (current_cpu == "mipsel") { if (current_cpu == "mipsel") {
sources += [ sources += [
"signal_processing/complex_bit_reverse_mips.c", "signal_processing/complex_bit_reverse_mips.c",
@ -150,7 +160,6 @@ source_set("common_audio") {
"signal_processing/include/spl_inl_mips.h", "signal_processing/include/spl_inl_mips.h",
"signal_processing/min_max_operations_mips.c", "signal_processing/min_max_operations_mips.c",
"signal_processing/resample_by_2_mips.c", "signal_processing/resample_by_2_mips.c",
"signal_processing/spl_sqrt_floor_mips.c",
] ]
if (mips_dsp_rev > 0) { if (mips_dsp_rev > 0) {
sources += [ "signal_processing/vector_scaling_operations_mips.c" ] sources += [ "signal_processing/vector_scaling_operations_mips.c" ]
@ -163,81 +172,227 @@ source_set("common_audio") {
sources += [ sources += [
"signal_processing/complex_bit_reverse.c", "signal_processing/complex_bit_reverse.c",
"signal_processing/filter_ar_fast_q12.c", "signal_processing/filter_ar_fast_q12.c",
"signal_processing/spl_sqrt_floor.c",
] ]
} }
if (is_win) { deps = [
cflags = [ "/wd4334" ] # Ignore warning on shift operator promotion. ":common_audio_c_arm_asm",
":common_audio_cc",
"../rtc_base:checks",
"../rtc_base:compile_assert_c",
"../rtc_base:rtc_base_approved",
"../rtc_base:sanitizer",
"../rtc_base/system:arch",
"../system_wrappers",
"third_party/ooura:fft_size_256",
"third_party/spl_sqrt_floor",
]
} }
configs += [ "..:common_config" ] rtc_library("common_audio_cc") {
sources = [
public_configs = [ "signal_processing/dot_product_with_scale.cc",
"..:common_inherited_config", "signal_processing/dot_product_with_scale.h",
":common_audio_config",
] ]
if (is_clang) { deps = [
# Suppress warnings from Chrome's Clang plugins. "../rtc_base:rtc_base_approved",
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details. "../system_wrappers",
configs -= [ "//build/config/clang:find_bad_constructs" ] ]
} }
rtc_source_set("sinc_resampler") {
sources = [ "resampler/sinc_resampler.h" ]
deps = [
"../rtc_base:gtest_prod",
"../rtc_base:rtc_base_approved",
"../rtc_base/memory:aligned_malloc",
"../rtc_base/system:arch",
"../system_wrappers",
]
}
rtc_source_set("fir_filter") {
visibility += webrtc_default_visibility
sources = [ "fir_filter.h" ]
}
rtc_library("fir_filter_factory") {
visibility += webrtc_default_visibility
sources = [
"fir_filter_c.cc",
"fir_filter_c.h",
"fir_filter_factory.cc",
"fir_filter_factory.h",
]
deps = [
":fir_filter",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:arch",
"../system_wrappers",
]
if (current_cpu == "x86" || current_cpu == "x64") { if (current_cpu == "x86" || current_cpu == "x64") {
deps += [ ":common_audio_sse2" ] deps += [ ":common_audio_sse2" ]
deps += [ ":common_audio_avx2" ]
}
if (rtc_build_with_neon) {
deps += [ ":common_audio_neon" ]
} }
} }
if (current_cpu == "x86" || current_cpu == "x64") { if (current_cpu == "x86" || current_cpu == "x64") {
source_set("common_audio_sse2") { rtc_library("common_audio_sse2") {
sources = [ sources = [
"fir_filter_sse.cc", "fir_filter_sse.cc",
"fir_filter_sse.h",
"resampler/sinc_resampler_sse.cc", "resampler/sinc_resampler_sse.cc",
] ]
if (is_posix) { if (is_posix || is_fuchsia) {
cflags = [ "-msse2" ] cflags = [ "-msse2" ]
} }
configs += [ "..:common_inherited_config" ] deps = [
":fir_filter",
if (is_clang) { ":sinc_resampler",
# Suppress warnings from Chrome's Clang plugins. "../rtc_base:checks",
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details. "../rtc_base:rtc_base_approved",
configs -= [ "//build/config/clang:find_bad_constructs" ] "../rtc_base/memory:aligned_malloc",
]
} }
rtc_library("common_audio_avx2") {
sources = [
"fir_filter_avx2.cc",
"fir_filter_avx2.h",
"resampler/sinc_resampler_avx2.cc",
]
if (is_win) {
cflags = [ "/arch:AVX2" ]
} else {
cflags = [
"-mavx2",
"-mfma",
]
}
deps = [
":fir_filter",
":sinc_resampler",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/memory:aligned_malloc",
]
} }
} }
if (rtc_build_with_neon) { if (rtc_build_with_neon) {
source_set("common_audio_neon") { rtc_library("common_audio_neon") {
sources = [ sources = [
"fir_filter_neon.cc", "fir_filter_neon.cc",
"fir_filter_neon.h",
"resampler/sinc_resampler_neon.cc", "resampler/sinc_resampler_neon.cc",
]
if (current_cpu != "arm64") {
# Enable compilation for the NEON instruction set.
suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ]
cflags = [ "-mfpu=neon" ]
}
deps = [
":common_audio_neon_c",
":fir_filter",
":sinc_resampler",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base/memory:aligned_malloc",
]
}
rtc_library("common_audio_neon_c") {
visibility += webrtc_default_visibility
sources = [
"signal_processing/cross_correlation_neon.c", "signal_processing/cross_correlation_neon.c",
"signal_processing/downsample_fast_neon.c", "signal_processing/downsample_fast_neon.c",
"signal_processing/min_max_operations_neon.c", "signal_processing/min_max_operations_neon.c",
] ]
if (current_cpu != "arm64") { if (current_cpu != "arm64") {
# Enable compilation for the NEON instruction set. This is needed # Enable compilation for the NEON instruction set.
# since //build/config/arm.gni only enables NEON for iOS, not Android. suppressed_configs += [ "//build/config/compiler:compiler_arm_fpu" ]
# This provides the same functionality as webrtc/build/arm_neon.gypi.
configs -= [ "//build/config/compiler:compiler_arm_fpu" ]
cflags = [ "-mfpu=neon" ] cflags = [ "-mfpu=neon" ]
} }
# Disable LTO on NEON targets due to compiler bug. deps = [
# TODO(fdegans): Enable this. See crbug.com/408997. ":common_audio_c",
if (rtc_use_lto) { "../rtc_base:checks",
cflags -= [ "../rtc_base:rtc_base_approved",
"-flto", "../rtc_base/system:arch",
"-ffat-lto-objects",
] ]
} }
}
configs += [ "..:common_config" ] if (rtc_include_tests) {
public_configs = [ "..:common_inherited_config" ] rtc_test("common_audio_unittests") {
visibility += webrtc_default_visibility
testonly = true
sources = [
"audio_converter_unittest.cc",
"audio_util_unittest.cc",
"channel_buffer_unittest.cc",
"fir_filter_unittest.cc",
"real_fourier_unittest.cc",
"resampler/push_resampler_unittest.cc",
"resampler/push_sinc_resampler_unittest.cc",
"resampler/resampler_unittest.cc",
"resampler/sinusoidal_linear_chirp_source.cc",
"resampler/sinusoidal_linear_chirp_source.h",
"ring_buffer_unittest.cc",
"signal_processing/real_fft_unittest.cc",
"signal_processing/signal_processing_unittest.cc",
"smoothing_filter_unittest.cc",
"vad/vad_core_unittest.cc",
"vad/vad_filterbank_unittest.cc",
"vad/vad_gmm_unittest.cc",
"vad/vad_sp_unittest.cc",
"vad/vad_unittest.cc",
"vad/vad_unittest.h",
"wav_file_unittest.cc",
"wav_header_unittest.cc",
"window_generator_unittest.cc",
]
# Does not compile on iOS for arm: webrtc:5544.
if (!is_ios || target_cpu != "arm") {
sources += [ "resampler/sinc_resampler_unittest.cc" ]
}
deps = [
":common_audio",
":common_audio_c",
":fir_filter",
":fir_filter_factory",
":sinc_resampler",
"../rtc_base:checks",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_base_tests_utils",
"../rtc_base/system:arch",
"../system_wrappers",
"../test:fileutils",
"../test:rtc_expect_death",
"../test:test_main",
"../test:test_support",
"//testing/gtest",
]
if (is_android) {
deps += [ "//testing/android/native_test:native_test_support" ]
shard_timeout = 900
}
} }
} }

View File

@ -8,32 +8,36 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/common_audio/audio_converter.h" #include "common_audio/audio_converter.h"
#include <cstring> #include <cstring>
#include <memory>
#include <utility>
#include <vector>
#include "webrtc/base/checks.h" #include "common_audio/channel_buffer.h"
#include "webrtc/base/safe_conversions.h" #include "common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/channel_buffer.h" #include "rtc_base/checks.h"
#include "webrtc/common_audio/resampler/push_sinc_resampler.h" #include "rtc_base/numerics/safe_conversions.h"
#include "webrtc/system_wrappers/include/scoped_vector.h"
using rtc::checked_cast;
namespace webrtc { namespace webrtc {
class CopyConverter : public AudioConverter { class CopyConverter : public AudioConverter {
public: public:
CopyConverter(int src_channels, size_t src_frames, int dst_channels, CopyConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {} : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
~CopyConverter() override {}; ~CopyConverter() override {}
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
if (src != dst) { if (src != dst) {
for (int i = 0; i < src_channels(); ++i) for (size_t i = 0; i < src_channels(); ++i)
std::memcpy(dst[i], src[i], dst_frames() * sizeof(*dst[i])); std::memcpy(dst[i], src[i], dst_frames() * sizeof(*dst[i]));
} }
} }
@ -41,17 +45,21 @@ class CopyConverter : public AudioConverter {
class UpmixConverter : public AudioConverter { class UpmixConverter : public AudioConverter {
public: public:
UpmixConverter(int src_channels, size_t src_frames, int dst_channels, UpmixConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {} : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
~UpmixConverter() override {}; ~UpmixConverter() override {}
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
for (size_t i = 0; i < dst_frames(); ++i) { for (size_t i = 0; i < dst_frames(); ++i) {
const float value = src[0][i]; const float value = src[0][i];
for (int j = 0; j < dst_channels(); ++j) for (size_t j = 0; j < dst_channels(); ++j)
dst[j][i] = value; dst[j][i] = value;
} }
} }
@ -59,19 +67,22 @@ class UpmixConverter : public AudioConverter {
class DownmixConverter : public AudioConverter { class DownmixConverter : public AudioConverter {
public: public:
DownmixConverter(int src_channels, size_t src_frames, int dst_channels, DownmixConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) { : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {}
} ~DownmixConverter() override {}
~DownmixConverter() override {};
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
float* dst_mono = dst[0]; float* dst_mono = dst[0];
for (size_t i = 0; i < src_frames(); ++i) { for (size_t i = 0; i < src_frames(); ++i) {
float sum = 0; float sum = 0;
for (int j = 0; j < src_channels(); ++j) for (size_t j = 0; j < src_channels(); ++j)
sum += src[j][i]; sum += src[j][i];
dst_mono[i] = sum / src_channels(); dst_mono[i] = sum / src_channels();
} }
@ -80,16 +91,21 @@ class DownmixConverter : public AudioConverter {
class ResampleConverter : public AudioConverter { class ResampleConverter : public AudioConverter {
public: public:
ResampleConverter(int src_channels, size_t src_frames, int dst_channels, ResampleConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames) size_t dst_frames)
: AudioConverter(src_channels, src_frames, dst_channels, dst_frames) { : AudioConverter(src_channels, src_frames, dst_channels, dst_frames) {
resamplers_.reserve(src_channels); resamplers_.reserve(src_channels);
for (int i = 0; i < src_channels; ++i) for (size_t i = 0; i < src_channels; ++i)
resamplers_.push_back(new PushSincResampler(src_frames, dst_frames)); resamplers_.push_back(std::unique_ptr<PushSincResampler>(
new PushSincResampler(src_frames, dst_frames)));
} }
~ResampleConverter() override {}; ~ResampleConverter() override {}
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
CheckSizes(src_size, dst_capacity); CheckSizes(src_size, dst_capacity);
for (size_t i = 0; i < resamplers_.size(); ++i) for (size_t i = 0; i < resamplers_.size(); ++i)
@ -97,69 +113,73 @@ class ResampleConverter : public AudioConverter {
} }
private: private:
ScopedVector<PushSincResampler> resamplers_; std::vector<std::unique_ptr<PushSincResampler>> resamplers_;
}; };
// Apply a vector of converters in serial, in the order given. At least two // Apply a vector of converters in serial, in the order given. At least two
// converters must be provided. // converters must be provided.
class CompositionConverter : public AudioConverter { class CompositionConverter : public AudioConverter {
public: public:
CompositionConverter(ScopedVector<AudioConverter> converters) explicit CompositionConverter(
: converters_(converters.Pass()) { std::vector<std::unique_ptr<AudioConverter>> converters)
RTC_CHECK_GE(converters_.size(), 2u); : converters_(std::move(converters)) {
RTC_CHECK_GE(converters_.size(), 2);
// We need an intermediate buffer after every converter. // We need an intermediate buffer after every converter.
for (auto it = converters_.begin(); it != converters_.end() - 1; ++it) for (auto it = converters_.begin(); it != converters_.end() - 1; ++it)
buffers_.push_back(new ChannelBuffer<float>((*it)->dst_frames(), buffers_.push_back(
(*it)->dst_channels())); std::unique_ptr<ChannelBuffer<float>>(new ChannelBuffer<float>(
(*it)->dst_frames(), (*it)->dst_channels())));
} }
~CompositionConverter() override {}; ~CompositionConverter() override {}
void Convert(const float* const* src, size_t src_size, float* const* dst, void Convert(const float* const* src,
size_t src_size,
float* const* dst,
size_t dst_capacity) override { size_t dst_capacity) override {
converters_.front()->Convert(src, src_size, buffers_.front()->channels(), converters_.front()->Convert(src, src_size, buffers_.front()->channels(),
buffers_.front()->size()); buffers_.front()->size());
for (size_t i = 2; i < converters_.size(); ++i) { for (size_t i = 2; i < converters_.size(); ++i) {
auto src_buffer = buffers_[i - 2]; auto& src_buffer = buffers_[i - 2];
auto dst_buffer = buffers_[i - 1]; auto& dst_buffer = buffers_[i - 1];
converters_[i]->Convert(src_buffer->channels(), converters_[i]->Convert(src_buffer->channels(), src_buffer->size(),
src_buffer->size(), dst_buffer->channels(), dst_buffer->size());
dst_buffer->channels(),
dst_buffer->size());
} }
converters_.back()->Convert(buffers_.back()->channels(), converters_.back()->Convert(buffers_.back()->channels(),
buffers_.back()->size(), dst, dst_capacity); buffers_.back()->size(), dst, dst_capacity);
} }
private: private:
ScopedVector<AudioConverter> converters_; std::vector<std::unique_ptr<AudioConverter>> converters_;
ScopedVector<ChannelBuffer<float>> buffers_; std::vector<std::unique_ptr<ChannelBuffer<float>>> buffers_;
}; };
rtc::scoped_ptr<AudioConverter> AudioConverter::Create(int src_channels, std::unique_ptr<AudioConverter> AudioConverter::Create(size_t src_channels,
size_t src_frames, size_t src_frames,
int dst_channels, size_t dst_channels,
size_t dst_frames) { size_t dst_frames) {
rtc::scoped_ptr<AudioConverter> sp; std::unique_ptr<AudioConverter> sp;
if (src_channels > dst_channels) { if (src_channels > dst_channels) {
if (src_frames != dst_frames) { if (src_frames != dst_frames) {
ScopedVector<AudioConverter> converters; std::vector<std::unique_ptr<AudioConverter>> converters;
converters.push_back(new DownmixConverter(src_channels, src_frames, converters.push_back(std::unique_ptr<AudioConverter>(new DownmixConverter(
dst_channels, src_frames)); src_channels, src_frames, dst_channels, src_frames)));
converters.push_back(new ResampleConverter(dst_channels, src_frames, converters.push_back(
dst_channels, dst_frames)); std::unique_ptr<AudioConverter>(new ResampleConverter(
sp.reset(new CompositionConverter(converters.Pass())); dst_channels, src_frames, dst_channels, dst_frames)));
sp.reset(new CompositionConverter(std::move(converters)));
} else { } else {
sp.reset(new DownmixConverter(src_channels, src_frames, dst_channels, sp.reset(new DownmixConverter(src_channels, src_frames, dst_channels,
dst_frames)); dst_frames));
} }
} else if (src_channels < dst_channels) { } else if (src_channels < dst_channels) {
if (src_frames != dst_frames) { if (src_frames != dst_frames) {
ScopedVector<AudioConverter> converters; std::vector<std::unique_ptr<AudioConverter>> converters;
converters.push_back(new ResampleConverter(src_channels, src_frames, converters.push_back(
src_channels, dst_frames)); std::unique_ptr<AudioConverter>(new ResampleConverter(
converters.push_back(new UpmixConverter(src_channels, dst_frames, src_channels, src_frames, src_channels, dst_frames)));
dst_channels, dst_frames)); converters.push_back(std::unique_ptr<AudioConverter>(new UpmixConverter(
sp.reset(new CompositionConverter(converters.Pass())); src_channels, dst_frames, dst_channels, dst_frames)));
sp.reset(new CompositionConverter(std::move(converters)));
} else { } else {
sp.reset(new UpmixConverter(src_channels, src_frames, dst_channels, sp.reset(new UpmixConverter(src_channels, src_frames, dst_channels,
dst_frames)); dst_frames));
@ -168,22 +188,21 @@ rtc::scoped_ptr<AudioConverter> AudioConverter::Create(int src_channels,
sp.reset(new ResampleConverter(src_channels, src_frames, dst_channels, sp.reset(new ResampleConverter(src_channels, src_frames, dst_channels,
dst_frames)); dst_frames));
} else { } else {
sp.reset(new CopyConverter(src_channels, src_frames, dst_channels, sp.reset(
dst_frames)); new CopyConverter(src_channels, src_frames, dst_channels, dst_frames));
} }
return sp.Pass(); return sp;
} }
// For CompositionConverter. // For CompositionConverter.
AudioConverter::AudioConverter() AudioConverter::AudioConverter()
: src_channels_(0), : src_channels_(0), src_frames_(0), dst_channels_(0), dst_frames_(0) {}
src_frames_(0),
dst_channels_(0),
dst_frames_(0) {}
AudioConverter::AudioConverter(int src_channels, size_t src_frames, AudioConverter::AudioConverter(size_t src_channels,
int dst_channels, size_t dst_frames) size_t src_frames,
size_t dst_channels,
size_t dst_frames)
: src_channels_(src_channels), : src_channels_(src_channels),
src_frames_(src_frames), src_frames_(src_frames),
dst_channels_(dst_channels), dst_channels_(dst_channels),

View File

@ -8,11 +8,14 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#ifndef WEBRTC_COMMON_AUDIO_AUDIO_CONVERTER_H_ #ifndef COMMON_AUDIO_AUDIO_CONVERTER_H_
#define WEBRTC_COMMON_AUDIO_AUDIO_CONVERTER_H_ #define COMMON_AUDIO_AUDIO_CONVERTER_H_
#include "webrtc/base/constructormagic.h" #include <stddef.h>
#include "webrtc/base/scoped_ptr.h"
#include <memory>
#include "rtc_base/constructor_magic.h"
namespace webrtc { namespace webrtc {
@ -26,36 +29,40 @@ class AudioConverter {
public: public:
// Returns a new AudioConverter, which will use the supplied format for its // Returns a new AudioConverter, which will use the supplied format for its
// lifetime. Caller is responsible for the memory. // lifetime. Caller is responsible for the memory.
static rtc::scoped_ptr<AudioConverter> Create(int src_channels, static std::unique_ptr<AudioConverter> Create(size_t src_channels,
size_t src_frames, size_t src_frames,
int dst_channels, size_t dst_channels,
size_t dst_frames); size_t dst_frames);
virtual ~AudioConverter() {}; virtual ~AudioConverter() {}
// Convert |src|, containing |src_size| samples, to |dst|, having a sample // Convert |src|, containing |src_size| samples, to |dst|, having a sample
// capacity of |dst_capacity|. Both point to a series of buffers containing // capacity of |dst_capacity|. Both point to a series of buffers containing
// the samples for each channel. The sizes must correspond to the format // the samples for each channel. The sizes must correspond to the format
// passed to Create(). // passed to Create().
virtual void Convert(const float* const* src, size_t src_size, virtual void Convert(const float* const* src,
float* const* dst, size_t dst_capacity) = 0; size_t src_size,
float* const* dst,
size_t dst_capacity) = 0;
int src_channels() const { return src_channels_; } size_t src_channels() const { return src_channels_; }
size_t src_frames() const { return src_frames_; } size_t src_frames() const { return src_frames_; }
int dst_channels() const { return dst_channels_; } size_t dst_channels() const { return dst_channels_; }
size_t dst_frames() const { return dst_frames_; } size_t dst_frames() const { return dst_frames_; }
protected: protected:
AudioConverter(); AudioConverter();
AudioConverter(int src_channels, size_t src_frames, int dst_channels, AudioConverter(size_t src_channels,
size_t src_frames,
size_t dst_channels,
size_t dst_frames); size_t dst_frames);
// Helper to RTC_CHECK that inputs are correctly sized. // Helper to RTC_CHECK that inputs are correctly sized.
void CheckSizes(size_t src_size, size_t dst_capacity) const; void CheckSizes(size_t src_size, size_t dst_capacity) const;
private: private:
const int src_channels_; const size_t src_channels_;
const size_t src_frames_; const size_t src_frames_;
const int dst_channels_; const size_t dst_channels_;
const size_t dst_frames_; const size_t dst_frames_;
RTC_DISALLOW_COPY_AND_ASSIGN(AudioConverter); RTC_DISALLOW_COPY_AND_ASSIGN(AudioConverter);
@ -63,4 +70,4 @@ class AudioConverter {
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_COMMON_AUDIO_AUDIO_CONVERTER_H_ #endif // COMMON_AUDIO_AUDIO_CONVERTER_H_

View File

@ -1,75 +0,0 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_audio/audio_ring_buffer.h"
#include "webrtc/base/checks.h"
#include "webrtc/common_audio/ring_buffer.h"
// This is a simple multi-channel wrapper over the ring_buffer.h C interface.
namespace webrtc {
AudioRingBuffer::AudioRingBuffer(size_t channels, size_t max_frames) {
buffers_.reserve(channels);
for (size_t i = 0; i < channels; ++i)
buffers_.push_back(WebRtc_CreateBuffer(max_frames, sizeof(float)));
}
AudioRingBuffer::~AudioRingBuffer() {
for (auto buf : buffers_)
WebRtc_FreeBuffer(buf);
}
void AudioRingBuffer::Write(const float* const* data, size_t channels,
size_t frames) {
RTC_DCHECK_EQ(buffers_.size(), channels);
for (size_t i = 0; i < channels; ++i) {
const size_t written = WebRtc_WriteBuffer(buffers_[i], data[i], frames);
RTC_CHECK_EQ(written, frames);
}
}
void AudioRingBuffer::Read(float* const* data, size_t channels, size_t frames) {
RTC_DCHECK_EQ(buffers_.size(), channels);
for (size_t i = 0; i < channels; ++i) {
const size_t read =
WebRtc_ReadBuffer(buffers_[i], nullptr, data[i], frames);
RTC_CHECK_EQ(read, frames);
}
}
size_t AudioRingBuffer::ReadFramesAvailable() const {
// All buffers have the same amount available.
return WebRtc_available_read(buffers_[0]);
}
size_t AudioRingBuffer::WriteFramesAvailable() const {
// All buffers have the same amount available.
return WebRtc_available_write(buffers_[0]);
}
void AudioRingBuffer::MoveReadPositionForward(size_t frames) {
for (auto buf : buffers_) {
const size_t moved =
static_cast<size_t>(WebRtc_MoveReadPtr(buf, static_cast<int>(frames)));
RTC_CHECK_EQ(moved, frames);
}
}
void AudioRingBuffer::MoveReadPositionBackward(size_t frames) {
for (auto buf : buffers_) {
const size_t moved = static_cast<size_t>(
-WebRtc_MoveReadPtr(buf, -static_cast<int>(frames)));
RTC_CHECK_EQ(moved, frames);
}
}
} // namespace webrtc

View File

@ -1,56 +0,0 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_COMMON_AUDIO_AUDIO_RING_BUFFER_H_
#define WEBRTC_COMMON_AUDIO_AUDIO_RING_BUFFER_H_
#include <stddef.h>
#include <vector>
struct RingBuffer;
namespace webrtc {
// A ring buffer tailored for float deinterleaved audio. Any operation that
// cannot be performed as requested will cause a crash (e.g. insufficient data
// in the buffer to fulfill a read request.)
class AudioRingBuffer final {
public:
// Specify the number of channels and maximum number of frames the buffer will
// contain.
AudioRingBuffer(size_t channels, size_t max_frames);
~AudioRingBuffer();
// Copies |data| to the buffer and advances the write pointer. |channels| must
// be the same as at creation time.
void Write(const float* const* data, size_t channels, size_t frames);
// Copies from the buffer to |data| and advances the read pointer. |channels|
// must be the same as at creation time.
void Read(float* const* data, size_t channels, size_t frames);
size_t ReadFramesAvailable() const;
size_t WriteFramesAvailable() const;
// Moves the read position. The forward version advances the read pointer
// towards the write pointer and the backward verison withdraws the read
// pointer away from the write pointer (i.e. flushing and stuffing the buffer
// respectively.)
void MoveReadPositionForward(size_t frames);
void MoveReadPositionBackward(size_t frames);
private:
// We don't use a ScopedVector because it doesn't support a specialized
// deleter (like scoped_ptr for instance.)
std::vector<RingBuffer*> buffers_;
};
} // namespace webrtc
#endif // WEBRTC_COMMON_AUDIO_AUDIO_RING_BUFFER_H_

View File

@ -8,9 +8,7 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/common_audio/include/audio_util.h" #include "common_audio/include/audio_util.h"
#include "webrtc/typedefs.h"
namespace webrtc { namespace webrtc {
@ -24,6 +22,11 @@ void S16ToFloat(const int16_t* src, size_t size, float* dest) {
dest[i] = S16ToFloat(src[i]); dest[i] = S16ToFloat(src[i]);
} }
void S16ToFloatS16(const int16_t* src, size_t size, float* dest) {
for (size_t i = 0; i < size; ++i)
dest[i] = src[i];
}
void FloatS16ToS16(const float* src, size_t size, int16_t* dest) { void FloatS16ToS16(const float* src, size_t size, int16_t* dest) {
for (size_t i = 0; i < size; ++i) for (size_t i = 0; i < size; ++i)
dest[i] = FloatS16ToS16(src[i]); dest[i] = FloatS16ToS16(src[i]);

View File

@ -1,236 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_audio/blocker.h"
#include <string.h>
#include "webrtc/base/checks.h"
namespace {
// Adds |a| and |b| frame by frame into |result| (basically matrix addition).
void AddFrames(const float* const* a,
size_t a_start_index,
const float* const* b,
int b_start_index,
size_t num_frames,
int num_channels,
float* const* result,
size_t result_start_index) {
for (int i = 0; i < num_channels; ++i) {
for (size_t j = 0; j < num_frames; ++j) {
result[i][j + result_start_index] =
a[i][j + a_start_index] + b[i][j + b_start_index];
}
}
}
// Copies |src| into |dst| channel by channel.
void CopyFrames(const float* const* src,
size_t src_start_index,
size_t num_frames,
int num_channels,
float* const* dst,
size_t dst_start_index) {
for (int i = 0; i < num_channels; ++i) {
memcpy(&dst[i][dst_start_index],
&src[i][src_start_index],
num_frames * sizeof(dst[i][dst_start_index]));
}
}
// Moves |src| into |dst| channel by channel.
void MoveFrames(const float* const* src,
size_t src_start_index,
size_t num_frames,
int num_channels,
float* const* dst,
size_t dst_start_index) {
for (int i = 0; i < num_channels; ++i) {
memmove(&dst[i][dst_start_index],
&src[i][src_start_index],
num_frames * sizeof(dst[i][dst_start_index]));
}
}
void ZeroOut(float* const* buffer,
size_t starting_idx,
size_t num_frames,
int num_channels) {
for (int i = 0; i < num_channels; ++i) {
memset(&buffer[i][starting_idx], 0,
num_frames * sizeof(buffer[i][starting_idx]));
}
}
// Pointwise multiplies each channel of |frames| with |window|. Results are
// stored in |frames|.
void ApplyWindow(const float* window,
size_t num_frames,
int num_channels,
float* const* frames) {
for (int i = 0; i < num_channels; ++i) {
for (size_t j = 0; j < num_frames; ++j) {
frames[i][j] = frames[i][j] * window[j];
}
}
}
size_t gcd(size_t a, size_t b) {
size_t tmp;
while (b) {
tmp = a;
a = b;
b = tmp % b;
}
return a;
}
} // namespace
namespace webrtc {
Blocker::Blocker(size_t chunk_size,
size_t block_size,
int num_input_channels,
int num_output_channels,
const float* window,
size_t shift_amount,
BlockerCallback* callback)
: chunk_size_(chunk_size),
block_size_(block_size),
num_input_channels_(num_input_channels),
num_output_channels_(num_output_channels),
initial_delay_(block_size_ - gcd(chunk_size, shift_amount)),
frame_offset_(0),
input_buffer_(num_input_channels_, chunk_size_ + initial_delay_),
output_buffer_(chunk_size_ + initial_delay_, num_output_channels_),
input_block_(block_size_, num_input_channels_),
output_block_(block_size_, num_output_channels_),
window_(new float[block_size_]),
shift_amount_(shift_amount),
callback_(callback) {
RTC_CHECK_LE(num_output_channels_, num_input_channels_);
RTC_CHECK_LE(shift_amount_, block_size_);
memcpy(window_.get(), window, block_size_ * sizeof(*window_.get()));
input_buffer_.MoveReadPositionBackward(initial_delay_);
}
// When block_size < chunk_size the input and output buffers look like this:
//
// delay* chunk_size chunk_size + delay*
// buffer: <-------------|---------------------|---------------|>
// _a_ _b_ _c_
//
// On each call to ProcessChunk():
// 1. New input gets read into sections _b_ and _c_ of the input buffer.
// 2. We block starting from frame_offset.
// 3. We block until we reach a block |bl| that doesn't contain any frames
// from sections _a_ or _b_ of the input buffer.
// 4. We window the current block, fire the callback for processing, window
// again, and overlap/add to the output buffer.
// 5. We copy sections _a_ and _b_ of the output buffer into output.
// 6. For both the input and the output buffers, we copy section _c_ into
// section _a_.
// 7. We set the new frame_offset to be the difference between the first frame
// of |bl| and the border between sections _b_ and _c_.
//
// When block_size > chunk_size the input and output buffers look like this:
//
// chunk_size delay* chunk_size + delay*
// buffer: <-------------|---------------------|---------------|>
// _a_ _b_ _c_
//
// On each call to ProcessChunk():
// The procedure is the same as above, except for:
// 1. New input gets read into section _c_ of the input buffer.
// 3. We block until we reach a block |bl| that doesn't contain any frames
// from section _a_ of the input buffer.
// 5. We copy section _a_ of the output buffer into output.
// 6. For both the input and the output buffers, we copy sections _b_ and _c_
// into section _a_ and _b_.
// 7. We set the new frame_offset to be the difference between the first frame
// of |bl| and the border between sections _a_ and _b_.
//
// * delay here refers to inintial_delay_
//
// TODO(claguna): Look at using ring buffers to eliminate some copies.
void Blocker::ProcessChunk(const float* const* input,
size_t chunk_size,
int num_input_channels,
int num_output_channels,
float* const* output) {
RTC_CHECK_EQ(chunk_size, chunk_size_);
RTC_CHECK_EQ(num_input_channels, num_input_channels_);
RTC_CHECK_EQ(num_output_channels, num_output_channels_);
input_buffer_.Write(input, num_input_channels, chunk_size_);
size_t first_frame_in_block = frame_offset_;
// Loop through blocks.
while (first_frame_in_block < chunk_size_) {
input_buffer_.Read(input_block_.channels(), num_input_channels,
block_size_);
input_buffer_.MoveReadPositionBackward(block_size_ - shift_amount_);
ApplyWindow(window_.get(),
block_size_,
num_input_channels_,
input_block_.channels());
callback_->ProcessBlock(input_block_.channels(),
block_size_,
num_input_channels_,
num_output_channels_,
output_block_.channels());
ApplyWindow(window_.get(),
block_size_,
num_output_channels_,
output_block_.channels());
AddFrames(output_buffer_.channels(),
first_frame_in_block,
output_block_.channels(),
0,
block_size_,
num_output_channels_,
output_buffer_.channels(),
first_frame_in_block);
first_frame_in_block += shift_amount_;
}
// Copy output buffer to output
CopyFrames(output_buffer_.channels(),
0,
chunk_size_,
num_output_channels_,
output,
0);
// Copy output buffer [chunk_size_, chunk_size_ + initial_delay]
// to output buffer [0, initial_delay], zero the rest.
MoveFrames(output_buffer_.channels(),
chunk_size,
initial_delay_,
num_output_channels_,
output_buffer_.channels(),
0);
ZeroOut(output_buffer_.channels(),
initial_delay_,
chunk_size_,
num_output_channels_);
// Calculate new starting frames.
frame_offset_ = first_frame_in_block - chunk_size_;
}
} // namespace webrtc

View File

@ -1,123 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_INTERNAL_BEAMFORMER_BLOCKER_H_
#define WEBRTC_INTERNAL_BEAMFORMER_BLOCKER_H_
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/audio_ring_buffer.h"
#include "webrtc/common_audio/channel_buffer.h"
namespace webrtc {
// The callback function to process audio in the time domain. Input has already
// been windowed, and output will be windowed. The number of input channels
// must be >= the number of output channels.
class BlockerCallback {
public:
virtual ~BlockerCallback() {}
virtual void ProcessBlock(const float* const* input,
size_t num_frames,
int num_input_channels,
int num_output_channels,
float* const* output) = 0;
};
// The main purpose of Blocker is to abstract away the fact that often we
// receive a different number of audio frames than our transform takes. For
// example, most FFTs work best when the fft-size is a power of 2, but suppose
// we receive 20ms of audio at a sample rate of 48000. That comes to 960 frames
// of audio, which is not a power of 2. Blocker allows us to specify the
// transform and all other necessary processing via the Process() callback
// function without any constraints on the transform-size
// (read: |block_size_|) or received-audio-size (read: |chunk_size_|).
// We handle this for the multichannel audio case, allowing for different
// numbers of input and output channels (for example, beamforming takes 2 or
// more input channels and returns 1 output channel). Audio signals are
// represented as deinterleaved floats in the range [-1, 1].
//
// Blocker is responsible for:
// - blocking audio while handling potential discontinuities on the edges
// of chunks
// - windowing blocks before sending them to Process()
// - windowing processed blocks, and overlap-adding them together before
// sending back a processed chunk
//
// To use blocker:
// 1. Impelment a BlockerCallback object |bc|.
// 2. Instantiate a Blocker object |b|, passing in |bc|.
// 3. As you receive audio, call b.ProcessChunk() to get processed audio.
//
// A small amount of delay is added to the first received chunk to deal with
// the difference in chunk/block sizes. This delay is <= chunk_size.
//
// Ownership of window is retained by the caller. That is, Blocker makes a
// copy of window and does not attempt to delete it.
class Blocker {
public:
Blocker(size_t chunk_size,
size_t block_size,
int num_input_channels,
int num_output_channels,
const float* window,
size_t shift_amount,
BlockerCallback* callback);
void ProcessChunk(const float* const* input,
size_t chunk_size,
int num_input_channels,
int num_output_channels,
float* const* output);
private:
const size_t chunk_size_;
const size_t block_size_;
const int num_input_channels_;
const int num_output_channels_;
// The number of frames of delay to add at the beginning of the first chunk.
const size_t initial_delay_;
// The frame index into the input buffer where the first block should be read
// from. This is necessary because shift_amount_ is not necessarily a
// multiple of chunk_size_, so blocks won't line up at the start of the
// buffer.
size_t frame_offset_;
// Since blocks nearly always overlap, there are certain blocks that require
// frames from the end of one chunk and the beginning of the next chunk. The
// input and output buffers are responsible for saving those frames between
// calls to ProcessChunk().
//
// Both contain |initial delay| + |chunk_size| frames. The input is a fairly
// standard FIFO, but due to the overlap-add it's harder to use an
// AudioRingBuffer for the output.
AudioRingBuffer input_buffer_;
ChannelBuffer<float> output_buffer_;
// Space for the input block (can't wrap because of windowing).
ChannelBuffer<float> input_block_;
// Space for the output block (can't wrap because of overlap/add).
ChannelBuffer<float> output_block_;
rtc::scoped_ptr<float[]> window_;
// The amount of frames between the start of contiguous blocks. For example,
// |shift_amount_| = |block_size_| / 2 for a Hann window.
size_t shift_amount_;
BlockerCallback* callback_;
};
} // namespace webrtc
#endif // WEBRTC_INTERNAL_BEAMFORMER_BLOCKER_H_

View File

@ -8,18 +8,25 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/common_audio/channel_buffer.h" #include "common_audio/channel_buffer.h"
#include <cstdint>
#include "common_audio/include/audio_util.h"
#include "rtc_base/checks.h"
namespace webrtc { namespace webrtc {
IFChannelBuffer::IFChannelBuffer(size_t num_frames, IFChannelBuffer::IFChannelBuffer(size_t num_frames,
int num_channels, size_t num_channels,
size_t num_bands) size_t num_bands)
: ivalid_(true), : ivalid_(true),
ibuf_(num_frames, num_channels, num_bands), ibuf_(num_frames, num_channels, num_bands),
fvalid_(true), fvalid_(true),
fbuf_(num_frames, num_channels, num_bands) {} fbuf_(num_frames, num_channels, num_bands) {}
IFChannelBuffer::~IFChannelBuffer() = default;
ChannelBuffer<int16_t>* IFChannelBuffer::ibuf() { ChannelBuffer<int16_t>* IFChannelBuffer::ibuf() {
RefreshI(); RefreshI();
fvalid_ = false; fvalid_ = false;
@ -44,10 +51,11 @@ const ChannelBuffer<float>* IFChannelBuffer::fbuf_const() const {
void IFChannelBuffer::RefreshF() const { void IFChannelBuffer::RefreshF() const {
if (!fvalid_) { if (!fvalid_) {
assert(ivalid_); RTC_DCHECK(ivalid_);
fbuf_.set_num_channels(ibuf_.num_channels());
const int16_t* const* int_channels = ibuf_.channels(); const int16_t* const* int_channels = ibuf_.channels();
float* const* float_channels = fbuf_.channels(); float* const* float_channels = fbuf_.channels();
for (int i = 0; i < ibuf_.num_channels(); ++i) { for (size_t i = 0; i < ibuf_.num_channels(); ++i) {
for (size_t j = 0; j < ibuf_.num_frames(); ++j) { for (size_t j = 0; j < ibuf_.num_frames(); ++j) {
float_channels[i][j] = int_channels[i][j]; float_channels[i][j] = int_channels[i][j];
} }
@ -58,13 +66,12 @@ void IFChannelBuffer::RefreshF() const {
void IFChannelBuffer::RefreshI() const { void IFChannelBuffer::RefreshI() const {
if (!ivalid_) { if (!ivalid_) {
assert(fvalid_); RTC_DCHECK(fvalid_);
int16_t* const* int_channels = ibuf_.channels(); int16_t* const* int_channels = ibuf_.channels();
ibuf_.set_num_channels(fbuf_.num_channels());
const float* const* float_channels = fbuf_.channels(); const float* const* float_channels = fbuf_.channels();
for (int i = 0; i < ibuf_.num_channels(); ++i) { for (size_t i = 0; i < fbuf_.num_channels(); ++i) {
FloatS16ToS16(float_channels[i], FloatS16ToS16(float_channels[i], ibuf_.num_frames(), int_channels[i]);
ibuf_.num_frames(),
int_channels[i]);
} }
ivalid_ = true; ivalid_ = true;
} }

View File

@ -8,17 +8,18 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_CHANNEL_BUFFER_H_ #ifndef COMMON_AUDIO_CHANNEL_BUFFER_H_
#define WEBRTC_MODULES_AUDIO_PROCESSING_CHANNEL_BUFFER_H_ #define COMMON_AUDIO_CHANNEL_BUFFER_H_
#include <string.h> #include <string.h>
#include "webrtc/base/checks.h" #include <memory>
#include "webrtc/base/scoped_ptr.h" #include <vector>
#include "webrtc/common_audio/include/audio_util.h"
#ifndef WEBRTC_AUDIO_PROCESSING_ONLY_BUILD #include "api/array_view.h"
#include "webrtc/test/testsupport/gtest_prod_util.h" #include "common_audio/include/audio_util.h"
#endif #include "rtc_base/checks.h"
#include "rtc_base/gtest_prod_util.h"
namespace webrtc { namespace webrtc {
@ -41,49 +42,68 @@ namespace webrtc {
template <typename T> template <typename T>
class ChannelBuffer { class ChannelBuffer {
public: public:
ChannelBuffer(size_t num_frames, ChannelBuffer(size_t num_frames, size_t num_channels, size_t num_bands = 1)
int num_channels,
size_t num_bands = 1)
: data_(new T[num_frames * num_channels]()), : data_(new T[num_frames * num_channels]()),
channels_(new T*[num_channels * num_bands]), channels_(new T*[num_channels * num_bands]),
bands_(new T*[num_channels * num_bands]), bands_(new T*[num_channels * num_bands]),
num_frames_(num_frames), num_frames_(num_frames),
num_frames_per_band_(num_frames / num_bands), num_frames_per_band_(num_frames / num_bands),
num_allocated_channels_(num_channels),
num_channels_(num_channels), num_channels_(num_channels),
num_bands_(num_bands) { num_bands_(num_bands),
for (int i = 0; i < num_channels_; ++i) { bands_view_(num_allocated_channels_,
for (size_t j = 0; j < num_bands_; ++j) { std::vector<rtc::ArrayView<T>>(num_bands_)),
channels_[j * num_channels_ + i] = channels_view_(
&data_[i * num_frames_ + j * num_frames_per_band_]; num_bands_,
bands_[i * num_bands_ + j] = channels_[j * num_channels_ + i]; std::vector<rtc::ArrayView<T>>(num_allocated_channels_)) {
// Temporarily cast away const_ness to allow populating the array views.
auto* bands_view =
const_cast<std::vector<std::vector<rtc::ArrayView<T>>>*>(&bands_view_);
auto* channels_view =
const_cast<std::vector<std::vector<rtc::ArrayView<T>>>*>(
&channels_view_);
for (size_t ch = 0; ch < num_allocated_channels_; ++ch) {
for (size_t band = 0; band < num_bands_; ++band) {
(*channels_view)[band][ch] = rtc::ArrayView<T>(
&data_[ch * num_frames_ + band * num_frames_per_band_],
num_frames_per_band_);
(*bands_view)[ch][band] = channels_view_[band][ch];
channels_[band * num_allocated_channels_ + ch] =
channels_view_[band][ch].data();
bands_[ch * num_bands_ + band] =
channels_[band * num_allocated_channels_ + ch];
} }
} }
} }
// Returns a pointer array to the full-band channels (or lower band channels). // Returns a pointer array to the channels.
// Usage: // If band is explicitly specificed, the channels for a specific band are
// channels()[channel][sample]. // returned and the usage becomes: channels(band)[channel][sample].
// Where:
// 0 <= channel < |num_channels_|
// 0 <= sample < |num_frames_|
T* const* channels() { return channels(0); }
const T* const* channels() const { return channels(0); }
// Returns a pointer array to the channels for a specific band.
// Usage:
// channels(band)[channel][sample].
// Where: // Where:
// 0 <= band < |num_bands_| // 0 <= band < |num_bands_|
// 0 <= channel < |num_channels_| // 0 <= channel < |num_allocated_channels_|
// 0 <= sample < |num_frames_per_band_| // 0 <= sample < |num_frames_per_band_|
const T* const* channels(size_t band) const {
// If band is not explicitly specified, the full-band channels (or lower band
// channels) are returned and the usage becomes: channels()[channel][sample].
// Where:
// 0 <= channel < |num_allocated_channels_|
// 0 <= sample < |num_frames_|
const T* const* channels(size_t band = 0) const {
RTC_DCHECK_LT(band, num_bands_); RTC_DCHECK_LT(band, num_bands_);
return &channels_[band * num_channels_]; return &channels_[band * num_allocated_channels_];
} }
T* const* channels(size_t band) { T* const* channels(size_t band = 0) {
const ChannelBuffer<T>* t = this; const ChannelBuffer<T>* t = this;
return const_cast<T* const*>(t->channels(band)); return const_cast<T* const*>(t->channels(band));
} }
rtc::ArrayView<const rtc::ArrayView<T>> channels_view(size_t band = 0) {
return channels_view_[band];
}
rtc::ArrayView<const rtc::ArrayView<T>> channels_view(size_t band = 0) const {
return channels_view_[band];
}
// Returns a pointer array to the bands for a specific channel. // Returns a pointer array to the bands for a specific channel.
// Usage: // Usage:
@ -92,21 +112,28 @@ class ChannelBuffer {
// 0 <= channel < |num_channels_| // 0 <= channel < |num_channels_|
// 0 <= band < |num_bands_| // 0 <= band < |num_bands_|
// 0 <= sample < |num_frames_per_band_| // 0 <= sample < |num_frames_per_band_|
const T* const* bands(int channel) const { const T* const* bands(size_t channel) const {
RTC_DCHECK_LT(channel, num_channels_); RTC_DCHECK_LT(channel, num_channels_);
RTC_DCHECK_GE(channel, 0); RTC_DCHECK_GE(channel, 0);
return &bands_[channel * num_bands_]; return &bands_[channel * num_bands_];
} }
T* const* bands(int channel) { T* const* bands(size_t channel) {
const ChannelBuffer<T>* t = this; const ChannelBuffer<T>* t = this;
return const_cast<T* const*>(t->bands(channel)); return const_cast<T* const*>(t->bands(channel));
} }
rtc::ArrayView<const rtc::ArrayView<T>> bands_view(size_t channel) {
return bands_view_[channel];
}
rtc::ArrayView<const rtc::ArrayView<T>> bands_view(size_t channel) const {
return bands_view_[channel];
}
// Sets the |slice| pointers to the |start_frame| position for each channel. // Sets the |slice| pointers to the |start_frame| position for each channel.
// Returns |slice| for convenience. // Returns |slice| for convenience.
const T* const* Slice(T** slice, size_t start_frame) const { const T* const* Slice(T** slice, size_t start_frame) const {
RTC_DCHECK_LT(start_frame, num_frames_); RTC_DCHECK_LT(start_frame, num_frames_);
for (int i = 0; i < num_channels_; ++i) for (size_t i = 0; i < num_channels_; ++i)
slice[i] = &channels_[i][start_frame]; slice[i] = &channels_[i][start_frame];
return slice; return slice;
} }
@ -117,9 +144,14 @@ class ChannelBuffer {
size_t num_frames() const { return num_frames_; } size_t num_frames() const { return num_frames_; }
size_t num_frames_per_band() const { return num_frames_per_band_; } size_t num_frames_per_band() const { return num_frames_per_band_; }
int num_channels() const { return num_channels_; } size_t num_channels() const { return num_channels_; }
size_t num_bands() const { return num_bands_; } size_t num_bands() const { return num_bands_; }
size_t size() const {return num_frames_ * num_channels_; } size_t size() const { return num_frames_ * num_allocated_channels_; }
void set_num_channels(size_t num_channels) {
RTC_DCHECK_LE(num_channels, num_allocated_channels_);
num_channels_ = num_channels;
}
void SetDataForTesting(const T* data, size_t size) { void SetDataForTesting(const T* data, size_t size) {
RTC_CHECK_EQ(size, this->size()); RTC_CHECK_EQ(size, this->size());
@ -127,13 +159,18 @@ class ChannelBuffer {
} }
private: private:
rtc::scoped_ptr<T[]> data_; std::unique_ptr<T[]> data_;
rtc::scoped_ptr<T* []> channels_; std::unique_ptr<T*[]> channels_;
rtc::scoped_ptr<T* []> bands_; std::unique_ptr<T*[]> bands_;
const size_t num_frames_; const size_t num_frames_;
const size_t num_frames_per_band_; const size_t num_frames_per_band_;
const int num_channels_; // Number of channels the internal buffer holds.
const size_t num_allocated_channels_;
// Number of channels the user sees.
size_t num_channels_;
const size_t num_bands_; const size_t num_bands_;
const std::vector<std::vector<rtc::ArrayView<T>>> bands_view_;
const std::vector<std::vector<rtc::ArrayView<T>>> channels_view_;
}; };
// One int16_t and one float ChannelBuffer that are kept in sync. The sync is // One int16_t and one float ChannelBuffer that are kept in sync. The sync is
@ -144,7 +181,8 @@ class ChannelBuffer {
// fbuf() until the next call to any of the other functions. // fbuf() until the next call to any of the other functions.
class IFChannelBuffer { class IFChannelBuffer {
public: public:
IFChannelBuffer(size_t num_frames, int num_channels, size_t num_bands = 1); IFChannelBuffer(size_t num_frames, size_t num_channels, size_t num_bands = 1);
~IFChannelBuffer();
ChannelBuffer<int16_t>* ibuf(); ChannelBuffer<int16_t>* ibuf();
ChannelBuffer<float>* fbuf(); ChannelBuffer<float>* fbuf();
@ -153,7 +191,13 @@ class IFChannelBuffer {
size_t num_frames() const { return ibuf_.num_frames(); } size_t num_frames() const { return ibuf_.num_frames(); }
size_t num_frames_per_band() const { return ibuf_.num_frames_per_band(); } size_t num_frames_per_band() const { return ibuf_.num_frames_per_band(); }
int num_channels() const { return ibuf_.num_channels(); } size_t num_channels() const {
return ivalid_ ? ibuf_.num_channels() : fbuf_.num_channels();
}
void set_num_channels(size_t num_channels) {
ibuf_.set_num_channels(num_channels);
fbuf_.set_num_channels(num_channels);
}
size_t num_bands() const { return ibuf_.num_bands(); } size_t num_bands() const { return ibuf_.num_bands(); }
private: private:
@ -168,4 +212,4 @@ class IFChannelBuffer {
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_CHANNEL_BUFFER_H_ #endif // COMMON_AUDIO_CHANNEL_BUFFER_H_

File diff suppressed because it is too large Load Diff

View File

@ -1,25 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_COMMON_AUDIO_FFT4G_H_
#define WEBRTC_COMMON_AUDIO_FFT4G_H_
#if defined(__cplusplus)
extern "C" {
#endif
// Refer to fft4g.c for documentation.
void WebRtc_rdft(size_t n, int isgn, float *a, size_t *ip, float *w);
#if defined(__cplusplus)
}
#endif
#endif // WEBRTC_COMMON_AUDIO_FFT4G_H_

View File

@ -1,116 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_audio/fir_filter.h"
#include <assert.h>
#include <string.h>
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_audio/fir_filter_neon.h"
#include "webrtc/common_audio/fir_filter_sse.h"
#include "webrtc/system_wrappers/include/cpu_features_wrapper.h"
namespace webrtc {
// Plain C++ (non-vectorized) FIR filter implementation. FIRFilter::Create()
// falls back to this class when no SIMD-accelerated variant is available
// for the target CPU.
class FIRFilterC : public FIRFilter {
 public:
  // |coefficients| holds the filter kernel and is copied by the constructor,
  // so the caller keeps ownership of the array.
  FIRFilterC(const float* coefficients,
             size_t coefficients_length);

  void Filter(const float* in, size_t length, float* out) override;

 private:
  size_t coefficients_length_;  // Number of filter taps.
  size_t state_length_;  // coefficients_length_ - 1 samples of history.
  rtc::scoped_ptr<float[]> coefficients_;  // Kernel stored in reversed order.
  rtc::scoped_ptr<float[]> state_;  // Tail of the previous input chunk.
};
// Factory for FIRFilter instances. Picks the fastest implementation the
// build target and (when needed) runtime CPU detection allow: SSE2 on x86,
// NEON on ARM, otherwise the plain C++ version. The caller takes ownership
// of the returned filter. Asserts and returns null on invalid arguments.
FIRFilter* FIRFilter::Create(const float* coefficients,
                             size_t coefficients_length,
                             size_t max_input_length) {
  // The length arguments are unsigned, so only the zero case is invalid
  // (the previous |<= 0| comparisons were tautological for size_t).
  if (!coefficients || coefficients_length == 0 || max_input_length == 0) {
    assert(false);
    return nullptr;
  }

  FIRFilter* filter = nullptr;
// If we know the minimum architecture at compile time, avoid CPU detection.
#if defined(WEBRTC_ARCH_X86_FAMILY)
#if defined(__SSE2__)
  filter =
      new FIRFilterSSE2(coefficients, coefficients_length, max_input_length);
#else
  // x86 CPU detection required.
  if (WebRtc_GetCPUInfo(kSSE2)) {
    filter =
        new FIRFilterSSE2(coefficients, coefficients_length, max_input_length);
  } else {
    filter = new FIRFilterC(coefficients, coefficients_length);
  }
#endif
#elif defined(WEBRTC_HAS_NEON)
  filter =
      new FIRFilterNEON(coefficients, coefficients_length, max_input_length);
#elif defined(WEBRTC_DETECT_NEON)
  if (WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON) {
    filter =
        new FIRFilterNEON(coefficients, coefficients_length, max_input_length);
  } else {
    filter = new FIRFilterC(coefficients, coefficients_length);
  }
#else
  filter = new FIRFilterC(coefficients, coefficients_length);
#endif

  return filter;
}
// Copies |coefficients| in reversed order so Filter() can walk the kernel
// front-to-back while advancing forward through the samples, and
// zero-initializes the state (history) buffer.
FIRFilterC::FIRFilterC(const float* coefficients, size_t coefficients_length)
    : coefficients_length_(coefficients_length),
      state_length_(coefficients_length - 1),
      coefficients_(new float[coefficients_length_]),
      state_(new float[state_length_]) {
  // Reverse the kernel: coefficients_[0] receives the last input tap.
  for (size_t i = 0; i < coefficients_length_; ++i) {
    coefficients_[i] = coefficients[coefficients_length_ - i - 1];
  }
  memset(state_.get(), 0, state_length_ * sizeof(state_[0]));
}
void FIRFilterC::Filter(const float* in, size_t length, float* out) {
  assert(length > 0);

  // Convolves the input signal |in| with the filter kernel |coefficients_|
  // taking into account the previous state.
  for (size_t i = 0; i < length; ++i) {
    out[i] = 0.f;
    size_t j;
    // First accumulate taps that fall on the history kept in |state_|...
    for (j = 0; state_length_ > i && j < state_length_ - i; ++j) {
      out[i] += state_[i + j] * coefficients_[j];
    }
    // ...then taps that fall on the current input chunk.
    for (; j < coefficients_length_; ++j) {
      out[i] += in[j + i - state_length_] * coefficients_[j];
    }
  }

  // Update current state.
  if (length >= state_length_) {
    // The new chunk covers the whole history: keep its last samples.
    memcpy(
        state_.get(), &in[length - state_length_], state_length_ * sizeof(*in));
  } else {
    // Shift the surviving part of the old history, then append the chunk.
    memmove(state_.get(),
            &state_[length],
            (state_length_ - length) * sizeof(state_[0]));
    memcpy(&state_[state_length_ - length], in, length * sizeof(*in));
  }
}
} // namespace webrtc

View File

@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#ifndef WEBRTC_COMMON_AUDIO_FIR_FILTER_H_ #ifndef COMMON_AUDIO_FIR_FILTER_H_
#define WEBRTC_COMMON_AUDIO_FIR_FILTER_H_ #define COMMON_AUDIO_FIR_FILTER_H_
#include <string.h> #include <string.h>
@ -18,16 +18,6 @@ namespace webrtc {
// Finite Impulse Response filter using floating-point arithmetic. // Finite Impulse Response filter using floating-point arithmetic.
class FIRFilter { class FIRFilter {
public: public:
// Creates a filter with the given coefficients. All initial state values will
// be zeros.
// The length of the chunks fed to the filter should never be greater than
// |max_input_length|. This is needed because, when vectorizing it is
// necessary to concatenate the input after the state, and resizing this array
// dynamically is expensive.
static FIRFilter* Create(const float* coefficients,
size_t coefficients_length,
size_t max_input_length);
virtual ~FIRFilter() {} virtual ~FIRFilter() {}
// Filters the |in| data supplied. // Filters the |in| data supplied.
@ -37,4 +27,4 @@ class FIRFilter {
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_COMMON_AUDIO_FIR_FILTER_H_ #endif // COMMON_AUDIO_FIR_FILTER_H_

View File

@ -0,0 +1,88 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_audio/fir_filter_avx2.h"
#include <immintrin.h>
#include <stdint.h>
#include <string.h>
#include <xmmintrin.h>
#include "rtc_base/checks.h"
#include "rtc_base/memory/aligned_malloc.h"
namespace webrtc {
// Rounds the kernel length up to a multiple of eight floats (one __m256) and
// zero-pads the front, so Filter() can process the taps in whole 32-byte
// aligned AVX2 vectors. The kernel is stored reversed; the state buffer is
// sized to hold the history followed by a maximum-size input chunk.
FIRFilterAVX2::FIRFilterAVX2(const float* unaligned_coefficients,
                             size_t unaligned_coefficients_length,
                             size_t max_input_length)
    :  // Closest higher multiple of eight.
      coefficients_length_((unaligned_coefficients_length + 7) & ~0x07),
      state_length_(coefficients_length_ - 1),
      coefficients_(static_cast<float*>(
          AlignedMalloc(sizeof(float) * coefficients_length_, 32))),
      state_(static_cast<float*>(
          AlignedMalloc(sizeof(float) * (max_input_length + state_length_),
                        32))) {
  // Add zeros at the end of the coefficients.
  RTC_DCHECK_GE(coefficients_length_, unaligned_coefficients_length);
  size_t padding = coefficients_length_ - unaligned_coefficients_length;
  memset(coefficients_.get(), 0, padding * sizeof(coefficients_[0]));
  // The coefficients are reversed to compensate for the order in which the
  // input samples are acquired (most recent last).
  for (size_t i = 0; i < unaligned_coefficients_length; ++i) {
    coefficients_[i + padding] =
        unaligned_coefficients[unaligned_coefficients_length - i - 1];
  }
  memset(state_.get(), 0,
         (max_input_length + state_length_) * sizeof(state_[0]));
}
FIRFilterAVX2::~FIRFilterAVX2() = default;
void FIRFilterAVX2::Filter(const float* in, size_t length, float* out) {
  RTC_DCHECK_GT(length, 0);

  // Append the new chunk right after the history so every output sample is
  // computed from one contiguous run of |coefficients_length_| samples.
  memcpy(&state_[state_length_], in, length * sizeof(*in));

  // Convolves the input signal |in| with the filter kernel |coefficients_|
  // taking into account the previous state.
  for (size_t i = 0; i < length; ++i) {
    float* in_ptr = &state_[i];
    float* coef_ptr = coefficients_.get();

    __m256 m_sum = _mm256_setzero_ps();
    __m256 m_in;

    // Depending on if the pointer is aligned with 32 bytes or not it is loaded
    // differently. The coefficients buffer is always 32-byte aligned, so the
    // aligned load can be used for it in both branches.
    if (reinterpret_cast<uintptr_t>(in_ptr) & 0x1F) {
      for (size_t j = 0; j < coefficients_length_; j += 8) {
        m_in = _mm256_loadu_ps(in_ptr + j);
        m_sum = _mm256_fmadd_ps(m_in, _mm256_load_ps(coef_ptr + j), m_sum);
      }
    } else {
      for (size_t j = 0; j < coefficients_length_; j += 8) {
        m_in = _mm256_load_ps(in_ptr + j);
        m_sum = _mm256_fmadd_ps(m_in, _mm256_load_ps(coef_ptr + j), m_sum);
      }
    }
    // Horizontally reduce the eight partial sums in |m_sum| to one float.
    __m128 m128_sum = _mm_add_ps(_mm256_extractf128_ps(m_sum, 0),
                                 _mm256_extractf128_ps(m_sum, 1));
    m128_sum = _mm_add_ps(_mm_movehl_ps(m128_sum, m128_sum), m128_sum);
    _mm_store_ss(out + i,
                 _mm_add_ss(m128_sum, _mm_shuffle_ps(m128_sum, m128_sum, 1)));
  }

  // Update current state.
  memmove(state_.get(), &state_[length], state_length_ * sizeof(state_[0]));
}
} // namespace webrtc

View File

@ -0,0 +1,41 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_AUDIO_FIR_FILTER_AVX2_H_
#define COMMON_AUDIO_FIR_FILTER_AVX2_H_
#include <stddef.h>
#include <memory>
#include "common_audio/fir_filter.h"
#include "rtc_base/memory/aligned_malloc.h"
namespace webrtc {
// FIR filter implementation using AVX2 (and FMA) intrinsics. Selected by
// CreateFirFilter() when runtime CPU detection reports AVX2 support.
class FIRFilterAVX2 : public FIRFilter {
 public:
  // |coefficients| is copied (reversed and zero-padded internally), so the
  // caller keeps ownership. Chunks passed to Filter() must never exceed
  // |max_input_length| samples.
  FIRFilterAVX2(const float* coefficients,
                size_t coefficients_length,
                size_t max_input_length);
  ~FIRFilterAVX2() override;

  void Filter(const float* in, size_t length, float* out) override;

 private:
  const size_t coefficients_length_;  // Tap count, rounded up to a multiple of 8.
  const size_t state_length_;  // coefficients_length_ - 1 samples of history.
  // 32-byte-aligned buffers, as required by the aligned AVX2 loads.
  std::unique_ptr<float[], AlignedFreeDeleter> coefficients_;
  std::unique_ptr<float[], AlignedFreeDeleter> state_;
};
} // namespace webrtc
#endif // COMMON_AUDIO_FIR_FILTER_AVX2_H_

View File

@ -0,0 +1,61 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_audio/fir_filter_c.h"
#include <string.h>
#include <memory>
#include "rtc_base/checks.h"
namespace webrtc {
FIRFilterC::~FIRFilterC() {}
// Stores the kernel in reversed order — Filter() walks the taps forward
// while the samples advance in time — and clears the history buffer.
FIRFilterC::FIRFilterC(const float* coefficients, size_t coefficients_length)
    : coefficients_length_(coefficients_length),
      state_length_(coefficients_length - 1),
      coefficients_(new float[coefficients_length_]),
      state_(new float[state_length_]) {
  // Walk the source backwards so coefficients_[0] gets the last tap.
  const float* src = coefficients + coefficients_length_;
  for (size_t tap = 0; tap < coefficients_length_; ++tap) {
    coefficients_[tap] = *--src;
  }
  memset(state_.get(), 0, state_length_ * sizeof(state_[0]));
}
void FIRFilterC::Filter(const float* in, size_t length, float* out) {
  RTC_DCHECK_GT(length, 0);

  // Convolves the input signal |in| with the filter kernel |coefficients_|
  // taking into account the previous state.
  for (size_t i = 0; i < length; ++i) {
    out[i] = 0.f;
    size_t j;
    // First accumulate taps that fall on the history kept in |state_|...
    for (j = 0; state_length_ > i && j < state_length_ - i; ++j) {
      out[i] += state_[i + j] * coefficients_[j];
    }
    // ...then taps that fall on the current input chunk.
    for (; j < coefficients_length_; ++j) {
      out[i] += in[j + i - state_length_] * coefficients_[j];
    }
  }

  // Update current state.
  if (length >= state_length_) {
    // The new chunk covers the whole history: keep its last samples.
    memcpy(state_.get(), &in[length - state_length_],
           state_length_ * sizeof(*in));
  } else {
    // Shift the surviving part of the old history, then append the chunk.
    memmove(state_.get(), &state_[length],
            (state_length_ - length) * sizeof(state_[0]));
    memcpy(&state_[state_length_ - length], in, length * sizeof(*in));
  }
}
} // namespace webrtc

View File

@ -0,0 +1,38 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_AUDIO_FIR_FILTER_C_H_
#define COMMON_AUDIO_FIR_FILTER_C_H_
#include <string.h>
#include <memory>
#include "common_audio/fir_filter.h"
namespace webrtc {
// Plain C++ (non-vectorized) FIR filter implementation. Used by
// CreateFirFilter() as the fallback when no SIMD variant is available.
class FIRFilterC : public FIRFilter {
 public:
  // |coefficients| holds the filter kernel and is copied by the constructor,
  // so the caller keeps ownership of the array.
  FIRFilterC(const float* coefficients, size_t coefficients_length);
  ~FIRFilterC() override;

  void Filter(const float* in, size_t length, float* out) override;

 private:
  size_t coefficients_length_;  // Number of filter taps.
  size_t state_length_;  // coefficients_length_ - 1 samples of history.
  std::unique_ptr<float[]> coefficients_;  // Kernel stored in reversed order.
  std::unique_ptr<float[]> state_;  // Tail of the previous input chunk.
};
} // namespace webrtc
#endif // COMMON_AUDIO_FIR_FILTER_C_H_

View File

@ -0,0 +1,58 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "common_audio/fir_filter_factory.h"
#include "common_audio/fir_filter_c.h"
#include "rtc_base/checks.h"
#include "rtc_base/system/arch.h"
#if defined(WEBRTC_HAS_NEON)
#include "common_audio/fir_filter_neon.h"
#elif defined(WEBRTC_ARCH_X86_FAMILY)
#include "common_audio/fir_filter_avx2.h"
#include "common_audio/fir_filter_sse.h"
#include "system_wrappers/include/cpu_features_wrapper.h" // kSSE2, WebRtc_G...
#endif
namespace webrtc {
// Factory for FIRFilter instances. Picks the fastest implementation allowed
// by the build target and, on x86, runtime CPU detection (AVX2, then SSE2,
// then plain C). The caller takes ownership of the returned filter.
// Hits RTC_NOTREACHED and returns null on invalid arguments.
FIRFilter* CreateFirFilter(const float* coefficients,
                           size_t coefficients_length,
                           size_t max_input_length) {
  // The length arguments are unsigned, so only the zero case is invalid
  // (the previous |<= 0| comparisons were tautological for size_t).
  if (!coefficients || coefficients_length == 0 || max_input_length == 0) {
    RTC_NOTREACHED();
    return nullptr;
  }

  FIRFilter* filter = nullptr;
// If we know the minimum architecture at compile time, avoid CPU detection.
#if defined(WEBRTC_ARCH_X86_FAMILY)
  // x86 CPU detection required.
  if (GetCPUInfo(kAVX2)) {
    filter =
        new FIRFilterAVX2(coefficients, coefficients_length, max_input_length);
  } else if (GetCPUInfo(kSSE2)) {
    filter =
        new FIRFilterSSE2(coefficients, coefficients_length, max_input_length);
  } else {
    filter = new FIRFilterC(coefficients, coefficients_length);
  }
#elif defined(WEBRTC_HAS_NEON)
  filter =
      new FIRFilterNEON(coefficients, coefficients_length, max_input_length);
#else
  filter = new FIRFilterC(coefficients, coefficients_length);
#endif

  return filter;
}
} // namespace webrtc

View File

@ -0,0 +1,32 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef COMMON_AUDIO_FIR_FILTER_FACTORY_H_
#define COMMON_AUDIO_FIR_FILTER_FACTORY_H_
#include <string.h>
namespace webrtc {
class FIRFilter;
// Creates a filter with the given coefficients. All initial state values will
// be zeros.
// The length of the chunks fed to the filter should never be greater than
// |max_input_length|. This is needed because, when vectorizing it is
// necessary to concatenate the input after the state, and resizing this array
// dynamically is expensive.
FIRFilter* CreateFirFilter(const float* coefficients,
size_t coefficients_length,
size_t max_input_length);
} // namespace webrtc
#endif // COMMON_AUDIO_FIR_FILTER_FACTORY_H_

View File

@ -8,16 +8,18 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/common_audio/fir_filter_neon.h" #include "common_audio/fir_filter_neon.h"
#include <arm_neon.h> #include <arm_neon.h>
#include <assert.h>
#include <string.h> #include <string.h>
#include "webrtc/system_wrappers/include/aligned_malloc.h" #include "rtc_base/checks.h"
#include "rtc_base/memory/aligned_malloc.h"
namespace webrtc { namespace webrtc {
FIRFilterNEON::~FIRFilterNEON() {}
FIRFilterNEON::FIRFilterNEON(const float* coefficients, FIRFilterNEON::FIRFilterNEON(const float* coefficients,
size_t coefficients_length, size_t coefficients_length,
size_t max_input_length) size_t max_input_length)
@ -37,13 +39,12 @@ FIRFilterNEON::FIRFilterNEON(const float* coefficients,
for (size_t i = 0; i < coefficients_length; ++i) { for (size_t i = 0; i < coefficients_length; ++i) {
coefficients_[i + padding] = coefficients[coefficients_length - i - 1]; coefficients_[i + padding] = coefficients[coefficients_length - i - 1];
} }
memset(state_.get(), memset(state_.get(), 0.f,
0.f,
(max_input_length + state_length_) * sizeof(state_[0])); (max_input_length + state_length_) * sizeof(state_[0]));
} }
void FIRFilterNEON::Filter(const float* in, size_t length, float* out) { void FIRFilterNEON::Filter(const float* in, size_t length, float* out) {
assert(length > 0); RTC_DCHECK_GT(length, 0);
memcpy(&state_[state_length_], in, length * sizeof(*in)); memcpy(&state_[state_length_], in, length * sizeof(*in));

View File

@ -8,12 +8,13 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#ifndef WEBRTC_COMMON_AUDIO_FIR_FILTER_NEON_H_ #ifndef COMMON_AUDIO_FIR_FILTER_NEON_H_
#define WEBRTC_COMMON_AUDIO_FIR_FILTER_NEON_H_ #define COMMON_AUDIO_FIR_FILTER_NEON_H_
#include "webrtc/base/scoped_ptr.h" #include <memory>
#include "webrtc/common_audio/fir_filter.h"
#include "webrtc/system_wrappers/include/aligned_malloc.h" #include "common_audio/fir_filter.h"
#include "rtc_base/memory/aligned_malloc.h"
namespace webrtc { namespace webrtc {
@ -22,16 +23,17 @@ class FIRFilterNEON : public FIRFilter {
FIRFilterNEON(const float* coefficients, FIRFilterNEON(const float* coefficients,
size_t coefficients_length, size_t coefficients_length,
size_t max_input_length); size_t max_input_length);
~FIRFilterNEON() override;
void Filter(const float* in, size_t length, float* out) override; void Filter(const float* in, size_t length, float* out) override;
private: private:
size_t coefficients_length_; size_t coefficients_length_;
size_t state_length_; size_t state_length_;
rtc::scoped_ptr<float[], AlignedFreeDeleter> coefficients_; std::unique_ptr<float[], AlignedFreeDeleter> coefficients_;
rtc::scoped_ptr<float[], AlignedFreeDeleter> state_; std::unique_ptr<float[], AlignedFreeDeleter> state_;
}; };
} // namespace webrtc } // namespace webrtc
#endif // WEBRTC_COMMON_AUDIO_FIR_FILTER_NEON_H_ #endif // COMMON_AUDIO_FIR_FILTER_NEON_H_

View File

@ -8,16 +8,19 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/common_audio/fir_filter_sse.h" #include "common_audio/fir_filter_sse.h"
#include <assert.h> #include <stdint.h>
#include <string.h> #include <string.h>
#include <xmmintrin.h> #include <xmmintrin.h>
#include "webrtc/system_wrappers/include/aligned_malloc.h" #include "rtc_base/checks.h"
#include "rtc_base/memory/aligned_malloc.h"
namespace webrtc { namespace webrtc {
FIRFilterSSE2::~FIRFilterSSE2() {}
FIRFilterSSE2::FIRFilterSSE2(const float* coefficients, FIRFilterSSE2::FIRFilterSSE2(const float* coefficients,
size_t coefficients_length, size_t coefficients_length,
size_t max_input_length) size_t max_input_length)
@ -37,13 +40,12 @@ FIRFilterSSE2::FIRFilterSSE2(const float* coefficients,
for (size_t i = 0; i < coefficients_length; ++i) { for (size_t i = 0; i < coefficients_length; ++i) {
coefficients_[i + padding] = coefficients[coefficients_length - i - 1]; coefficients_[i + padding] = coefficients[coefficients_length - i - 1];
} }
memset(state_.get(), memset(state_.get(), 0,
0,
(max_input_length + state_length_) * sizeof(state_[0])); (max_input_length + state_length_) * sizeof(state_[0]));
} }
void FIRFilterSSE2::Filter(const float* in, size_t length, float* out) { void FIRFilterSSE2::Filter(const float* in, size_t length, float* out) {
assert(length > 0); RTC_DCHECK_GT(length, 0);
memcpy(&state_[state_length_], in, length * sizeof(*in)); memcpy(&state_[state_length_], in, length * sizeof(*in));

Some files were not shown because too many files have changed in this diff Show More